Dec 05 12:25:19 crc systemd[1]: Starting Kubernetes Kubelet...
Dec 05 12:25:19 crc restorecon[4686]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 12:25:19 crc restorecon[4686]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:19 crc restorecon[4686]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 12:25:19 crc 
restorecon[4686]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 12:25:19 crc restorecon[4686]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 12:25:19 crc restorecon[4686]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 12:25:19 crc 
restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc 
restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc 
restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 12:25:19 
crc restorecon[4686]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 
12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 12:25:19 crc restorecon[4686]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 05 12:25:19 crc restorecon[4686]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:25:19 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]:
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]:
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 
12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc 
restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 12:25:20 crc restorecon[4686]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 12:25:20 crc restorecon[4686]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 12:25:20 crc restorecon[4686]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Dec 05 12:25:20 crc kubenswrapper[4784]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 05 12:25:20 crc kubenswrapper[4784]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Dec 05 12:25:20 crc kubenswrapper[4784]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 05 12:25:20 crc kubenswrapper[4784]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
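[editor's note — illustrative aside, not part of the captured log] Every restorecon message above has the same shape: a syslog timestamp, the host (crc), the emitting process (restorecon[4686]), a path under /var/lib/kubelet, and the target SELinux context the file was not reset to, in user:role:type:level form. The level component carries the MCS category pair (for example s0:c7,c13) that keeps one pod's files isolated from another pod's processes. The Go sketch below is a hypothetical helper, assuming nothing beyond the context syntax visible in these messages, that splits such a context string into its fields:

// selinuxctx.go — a minimal illustrative sketch, not part of the log or of any
// OpenShift tooling; type and field names are hypothetical.
package main

import (
	"fmt"
	"strings"
)

// Context mirrors the user:role:type:level layout of an SELinux label such as
// system_u:object_r:container_file_t:s0:c7,c13.
type Context struct {
	User, Role, Type string
	Sensitivity      string   // e.g. "s0"
	Categories       []string // MCS categories, e.g. ["c7", "c13"]
}

func parseContext(s string) (Context, error) {
	parts := strings.SplitN(s, ":", 4)
	if len(parts) != 4 {
		return Context{}, fmt.Errorf("not a user:role:type:level context: %q", s)
	}
	level := parts[3] // "s0" or "s0:c7,c13"
	sens, cats, _ := strings.Cut(level, ":")
	c := Context{User: parts[0], Role: parts[1], Type: parts[2], Sensitivity: sens}
	if cats != "" {
		c.Categories = strings.Split(cats, ",")
	}
	return c, nil
}

func main() {
	// Target context copied verbatim from one of the messages above.
	ctx, err := parseContext("system_u:object_r:container_file_t:s0:c7,c13")
	if err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", ctx)
	// Prints: {User:system_u Role:object_r Type:container_file_t Sensitivity:s0 Categories:[c7 c13]}
}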
Dec 05 12:25:20 crc kubenswrapper[4784]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Dec 05 12:25:20 crc kubenswrapper[4784]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.849645 4784 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852369 4784 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852386 4784 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852393 4784 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852398 4784 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852403 4784 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852409 4784 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852416 4784 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852448 4784 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852454 4784 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852459 4784 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852463 4784 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852468 4784 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852472 4784 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852477 4784 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852481 4784 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852486 4784 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852490 4784 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852494 4784 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852544 4784 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852550 4784 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 
05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852556 4784 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852562 4784 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852567 4784 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852572 4784 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852578 4784 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852583 4784 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852587 4784 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852592 4784 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852596 4784 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852601 4784 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852606 4784 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852611 4784 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852617 4784 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852622 4784 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852626 4784 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852633 4784 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852638 4784 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852643 4784 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852648 4784 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852652 4784 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852657 4784 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852661 4784 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852666 4784 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852671 4784 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852675 4784 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852680 4784 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852684 4784 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852689 4784 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852694 4784 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852698 4784 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852703 4784 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852709 4784 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852714 4784 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852718 4784 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852723 4784 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852727 4784 feature_gate.go:330] unrecognized feature gate: Example Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852732 4784 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852737 4784 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852742 4784 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852746 4784 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852752 4784 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852757 4784 
feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852762 4784 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852766 4784 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852771 4784 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852776 4784 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852781 4784 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852786 4784 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852790 4784 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852795 4784 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.852799 4784 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853122 4784 flags.go:64] FLAG: --address="0.0.0.0" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853137 4784 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853146 4784 flags.go:64] FLAG: --anonymous-auth="true" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853154 4784 flags.go:64] FLAG: --application-metrics-count-limit="100" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853161 4784 flags.go:64] FLAG: --authentication-token-webhook="false" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853166 4784 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853174 4784 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853181 4784 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853203 4784 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853209 4784 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853215 4784 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853220 4784 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853226 4784 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853231 4784 flags.go:64] FLAG: --cgroup-root="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853236 4784 flags.go:64] FLAG: --cgroups-per-qos="true" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853241 4784 flags.go:64] FLAG: --client-ca-file="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853246 4784 flags.go:64] FLAG: --cloud-config="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853251 4784 flags.go:64] FLAG: --cloud-provider="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853257 4784 flags.go:64] FLAG: 
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853267 4784 flags.go:64] FLAG: --cluster-domain=""
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853272 4784 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853278 4784 flags.go:64] FLAG: --config-dir=""
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853283 4784 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853290 4784 flags.go:64] FLAG: --container-log-max-files="5"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853297 4784 flags.go:64] FLAG: --container-log-max-size="10Mi"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853304 4784 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853309 4784 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853315 4784 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853320 4784 flags.go:64] FLAG: --contention-profiling="false"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853326 4784 flags.go:64] FLAG: --cpu-cfs-quota="true"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853331 4784 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853336 4784 flags.go:64] FLAG: --cpu-manager-policy="none"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853342 4784 flags.go:64] FLAG: --cpu-manager-policy-options=""
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853349 4784 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853354 4784 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853360 4784 flags.go:64] FLAG: --enable-debugging-handlers="true"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853365 4784 flags.go:64] FLAG: --enable-load-reader="false"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853370 4784 flags.go:64] FLAG: --enable-server="true"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853375 4784 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853382 4784 flags.go:64] FLAG: --event-burst="100"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853390 4784 flags.go:64] FLAG: --event-qps="50"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853395 4784 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853401 4784 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853406 4784 flags.go:64] FLAG: --eviction-hard=""
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853414 4784 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853419 4784 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853425 4784 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853430 4784 flags.go:64] FLAG: --eviction-soft=""
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853435 4784 flags.go:64] FLAG: --eviction-soft-grace-period=""
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853440 4784 flags.go:64] FLAG: --exit-on-lock-contention="false"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853447 4784 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853452 4784 flags.go:64] FLAG: --experimental-mounter-path=""
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853457 4784 flags.go:64] FLAG: --fail-cgroupv1="false"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853462 4784 flags.go:64] FLAG: --fail-swap-on="true"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853468 4784 flags.go:64] FLAG: --feature-gates=""
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853476 4784 flags.go:64] FLAG: --file-check-frequency="20s"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853481 4784 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853487 4784 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853492 4784 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853498 4784 flags.go:64] FLAG: --healthz-port="10248"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853503 4784 flags.go:64] FLAG: --help="false"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853510 4784 flags.go:64] FLAG: --hostname-override=""
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853516 4784 flags.go:64] FLAG: --housekeeping-interval="10s"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853521 4784 flags.go:64] FLAG: --http-check-frequency="20s"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853526 4784 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853531 4784 flags.go:64] FLAG: --image-credential-provider-config=""
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853536 4784 flags.go:64] FLAG: --image-gc-high-threshold="85"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853542 4784 flags.go:64] FLAG: --image-gc-low-threshold="80"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853547 4784 flags.go:64] FLAG: --image-service-endpoint=""
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853551 4784 flags.go:64] FLAG: --kernel-memcg-notification="false"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853557 4784 flags.go:64] FLAG: --kube-api-burst="100"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853562 4784 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853568 4784 flags.go:64] FLAG: --kube-api-qps="50"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853574 4784 flags.go:64] FLAG: --kube-reserved=""
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853579 4784 flags.go:64] FLAG: --kube-reserved-cgroup=""
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853584 4784 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853590 4784 flags.go:64] FLAG: --kubelet-cgroups=""
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853594 4784 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853600 4784 flags.go:64] FLAG: --lock-file=""
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853605 4784 flags.go:64] FLAG: --log-cadvisor-usage="false"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853611 4784 flags.go:64] FLAG: --log-flush-frequency="5s"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853616 4784 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853625 4784 flags.go:64] FLAG: --log-json-split-stream="false"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853630 4784 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853635 4784 flags.go:64] FLAG: --log-text-split-stream="false"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853640 4784 flags.go:64] FLAG: --logging-format="text"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853646 4784 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853651 4784 flags.go:64] FLAG: --make-iptables-util-chains="true"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853657 4784 flags.go:64] FLAG: --manifest-url=""
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853663 4784 flags.go:64] FLAG: --manifest-url-header=""
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853671 4784 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853676 4784 flags.go:64] FLAG: --max-open-files="1000000"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853682 4784 flags.go:64] FLAG: --max-pods="110"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853688 4784 flags.go:64] FLAG: --maximum-dead-containers="-1"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853693 4784 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853698 4784 flags.go:64] FLAG: --memory-manager-policy="None"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853704 4784 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853710 4784 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853716 4784 flags.go:64] FLAG: --node-ip="192.168.126.11"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853722 4784 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853734 4784 flags.go:64] FLAG: --node-status-max-images="50"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853739 4784 flags.go:64] FLAG: --node-status-update-frequency="10s"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853745 4784 flags.go:64] FLAG: --oom-score-adj="-999"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853751 4784 flags.go:64] FLAG: --pod-cidr=""
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853757 4784 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853765 4784 flags.go:64] FLAG: --pod-manifest-path=""
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853771 4784 flags.go:64] FLAG: --pod-max-pids="-1"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853777 4784 flags.go:64] FLAG: --pods-per-core="0"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853782 4784 flags.go:64] FLAG: --port="10250"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853788 4784 flags.go:64] FLAG: --protect-kernel-defaults="false"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853793 4784 flags.go:64] FLAG: --provider-id=""
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853798 4784 flags.go:64] FLAG: --qos-reserved=""
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853804 4784 flags.go:64] FLAG: --read-only-port="10255"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853810 4784 flags.go:64] FLAG: --register-node="true"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853816 4784 flags.go:64] FLAG: --register-schedulable="true"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853821 4784 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853831 4784 flags.go:64] FLAG: --registry-burst="10"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853836 4784 flags.go:64] FLAG: --registry-qps="5"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853842 4784 flags.go:64] FLAG: --reserved-cpus=""
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853847 4784 flags.go:64] FLAG: --reserved-memory=""
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853853 4784 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853859 4784 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853864 4784 flags.go:64] FLAG: --rotate-certificates="false"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853869 4784 flags.go:64] FLAG: --rotate-server-certificates="false"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853874 4784 flags.go:64] FLAG: --runonce="false"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853879 4784 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853885 4784 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853891 4784 flags.go:64] FLAG: --seccomp-default="false"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853896 4784 flags.go:64] FLAG: --serialize-image-pulls="true"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853902 4784 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853907 4784 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853913 4784 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853918 4784 flags.go:64] FLAG: --storage-driver-password="root"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853925 4784 flags.go:64] FLAG: --storage-driver-secure="false"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853930 4784 flags.go:64] FLAG: --storage-driver-table="stats"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853935 4784 flags.go:64] FLAG: --storage-driver-user="root"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853941 4784 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853947 4784 flags.go:64] FLAG: --sync-frequency="1m0s"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853952 4784 flags.go:64] FLAG: --system-cgroups=""
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853957 4784 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853966 4784 flags.go:64] FLAG: --system-reserved-cgroup=""
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853971 4784 flags.go:64] FLAG: --tls-cert-file=""
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853976 4784 flags.go:64] FLAG: --tls-cipher-suites="[]"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853984 4784 flags.go:64] FLAG: --tls-min-version=""
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.853994 4784 flags.go:64] FLAG: --tls-private-key-file=""
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.854000 4784 flags.go:64] FLAG: --topology-manager-policy="none"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.854005 4784 flags.go:64] FLAG: --topology-manager-policy-options=""
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.854010 4784 flags.go:64] FLAG: --topology-manager-scope="container"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.854016 4784 flags.go:64] FLAG: --v="2"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.854023 4784 flags.go:64] FLAG: --version="false"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.854030 4784 flags.go:64] FLAG: --vmodule=""
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.854037 4784 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.854042 4784 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
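
[Editorial sketch] The deprecation warnings at startup all point at the same remedy: move these settings into the config file named by --config (here /etc/kubernetes/kubelet.conf, per the flag dump above). As a minimal illustration only -- on this cluster the file is rendered for the node rather than hand-edited, and the actual rendered contents are not shown in this log -- the deprecated flags and their logged values would map onto KubeletConfiguration fields roughly like this:

apiVersion: kubelet.config.k8s.io/v1beta1
kind: KubeletConfiguration
containerRuntimeEndpoint: /var/run/crio/crio.sock             # replaces --container-runtime-endpoint
volumePluginDir: /etc/kubernetes/kubelet-plugins/volume/exec  # replaces --volume-plugin-dir
registerWithTaints:                                           # replaces --register-with-taints
- key: node-role.kubernetes.io/master
  effect: NoSchedule
systemReserved:                                               # replaces --system-reserved
  cpu: 200m
  memory: 350Mi
  ephemeral-storage: 350Mi
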
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854509 4784 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854521 4784 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854528 4784 feature_gate.go:330] unrecognized feature gate: Example
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854534 4784 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854539 4784 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854544 4784 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854549 4784 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854554 4784 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854558 4784 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854563 4784 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854567 4784 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854572 4784 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854576 4784 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854580 4784 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854585 4784 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854591 4784 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854597 4784 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854602 4784 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854606 4784 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854611 4784 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854615 4784 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854620 4784 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854624 4784 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854635 4784 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854640 4784 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854645 4784 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854649 4784 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854654 4784 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854659 4784 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854663 4784 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854668 4784 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854673 4784 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854678 4784 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854682 4784 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854687 4784 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854691 4784 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854696 4784 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854701 4784 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854706 4784 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854710 4784 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854715 4784 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854719 4784 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854723 4784 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854728 4784 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854733 4784 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854737 4784 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854742 4784 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854746 4784 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854751 4784 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854755 4784 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854760 4784 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854764 4784 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854770 4784 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854775 4784 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854779 4784 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854786 4784 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854790 4784 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854795 4784 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854801 4784 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854807 4784 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854812 4784 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854819 4784 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854824 4784 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854830 4784 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854836 4784 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854841 4784 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854846 4784 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854851 4784 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854855 4784 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854860 4784 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.854865 4784 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.854872 4784 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.863607 4784 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.863655 4784 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
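
[Editorial sketch] The feature_gate.go:386 line above is the gate set this kubelet actually applies; the long runs of "unrecognized feature gate" warnings are cluster-level (OpenShift) gate names that the kubelet's embedded Kubernetes gate registry does not know, so it warns and skips them. For gates the kubelet does recognize, the place to set them is the featureGates map in the kubelet configuration; a sketch mirroring only the gates logged as true above (the remaining gates in the map default to false):

apiVersion: kubelet.config.k8s.io/v1beta1
kind: KubeletConfiguration
featureGates:
  CloudDualStackNodeIPs: true                   # logged as GA; the override triggers the feature_gate.go:353 warning
  DisableKubeletCloudCredentialProviders: true  # logged as GA
  KMSv1: true                                   # logged as deprecated (feature_gate.go:351 warning)
  ValidatingAdmissionPolicy: true               # logged as GA
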
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863731 4784 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863740 4784 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863745 4784 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863749 4784 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863753 4784 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863757 4784 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863761 4784 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863766 4784 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863770 4784 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863775 4784 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863780 4784 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863783 4784 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863789 4784 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863794 4784 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863799 4784 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863803 4784 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863807 4784 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863811 4784 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863814 4784 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863819 4784 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863822 4784 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863826 4784 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863829 4784 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863833 4784 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863837 4784 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863841 4784 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863845 4784 feature_gate.go:330] unrecognized feature gate: Example
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863848 4784 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863852 4784 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863857 4784 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863861 4784 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863865 4784 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863869 4784 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863873 4784 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863877 4784 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863881 4784 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863885 4784 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863889 4784 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863893 4784 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863897 4784 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863901 4784 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863905 4784 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863911 4784 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863915 4784 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863918 4784 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863922 4784 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863926 4784 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863931 4784 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863935 4784 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863940 4784 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863945 4784 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863949 4784 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863953 4784 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863957 4784 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863961 4784 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863964 4784 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863968 4784 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863972 4784 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863976 4784 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863981 4784 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863987 4784 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863991 4784 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863995 4784 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.863999 4784 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864002 4784 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864007 4784 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864011 4784 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864014 4784 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864018 4784 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864022 4784 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864025 4784 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.864033 4784 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864169 4784 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864217 4784 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864228 4784 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864241 4784 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864292 4784 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864297 4784 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864303 4784 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864309 4784 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864314 4784 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864319 4784 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864324 4784 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864328 4784 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864333 4784 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864337 4784 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864342 4784 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864347 4784 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864352 4784 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864356 4784 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864361 4784 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864366 4784 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864371 4784 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864375 4784 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864380 4784 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864384 4784 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864389 4784 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864393 4784 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864399 4784 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864404 4784 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864408 4784 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864413 4784 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864417 4784 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864422 4784 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864426 4784 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864431 4784 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864436 4784 feature_gate.go:330] unrecognized feature gate: Example
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864442 4784 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864447 4784 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864453 4784 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864459 4784 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864463 4784 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864467 4784 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864472 4784 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864477 4784 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864481 4784 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864485 4784 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864488 4784 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864493 4784 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864496 4784 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864500 4784 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864535 4784 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864539 4784 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864544 4784 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864548 4784 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864551 4784 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864555 4784 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864559 4784 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864563 4784 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864567 4784 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864570 4784 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864574 4784 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864577 4784 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864581 4784 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864584 4784 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864588 4784 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864591 4784 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864595 4784 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864598 4784 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864603 4784 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864606 4784 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864610 4784 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.864613 4784 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.864619 4784 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
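
[Editorial sketch] The lines that follow show client certificate bootstrap and rotation. Note the apparent contradiction with the flag dump above, where --rotate-certificates="false" was the (default) flag value: rotation is evidently switched on by the config file, which takes precedence here. The corresponding field is sketched below as an assumption inferred from the "Client rotation is on" line, not read from the actual rendered file:

apiVersion: kubelet.config.k8s.io/v1beta1
kind: KubeletConfiguration
rotateCertificates: true   # assumed: matches the client rotation behavior observed below
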
bootstrap in background" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.869300 4784 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.869389 4784 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.869871 4784 server.go:997] "Starting client certificate rotation" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.869902 4784 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.870434 4784 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2026-01-18 12:40:03.421862674 +0000 UTC Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.870534 4784 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 1056h14m42.551332269s for next certificate rotation Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.879520 4784 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.881770 4784 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.890598 4784 log.go:25] "Validated CRI v1 runtime API" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.910527 4784 log.go:25] "Validated CRI v1 image API" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.911732 4784 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.914636 4784 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-05-12-20-37-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.914681 4784 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}] Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.928206 4784 manager.go:217] Machine: {Timestamp:2025-12-05 12:25:20.926947436 +0000 UTC m=+0.347014271 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654124544 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:aac4a951-c40f-4b5f-a660-6c137757957c BootID:08035136-431a-41d0-879c-bf86d5af7e54 Filesystems:[{Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} 
{Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:49:88:bf Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:49:88:bf Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:c4:76:e4 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:86:8a:f2 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:91:bb:e6 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:81:a3:f6 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:c2:4f:fb:2e:59:7f Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:1a:ca:f8:d7:c1:eb Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654124544 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 
Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.928447 4784 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.928638 4784 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.929534 4784 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.929783 4784 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.929834 4784 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.930076 4784 topology_manager.go:138] "Creating topology manager with none policy"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.930090 4784 container_manager_linux.go:303] "Creating device plugin manager"
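[editor's note] The nodeConfig dump above carries the node's hard eviction thresholds in two shapes: absolute quantities (memory.available < 100Mi) and percentages of capacity (nodefs.available < 10%, imagefs.available < 15%). A small sketch of how such mixed thresholds can be evaluated; the types and observed values are hypothetical, not the kubelet's eviction manager:

```go
package main

import "fmt"

// threshold mirrors one entry of the HardEvictionThresholds block above:
// a signal, an implicit LessThan operator, and either an absolute quantity
// in bytes or a percentage of capacity.
type threshold struct {
	signal     string
	quantity   int64   // absolute bytes; 0 if percentage-based
	percentage float64 // fraction of capacity; 0 if quantity-based
}

// breached reports whether an observed available value trips a threshold,
// resolving percentages against total capacity the way the logged config
// implies (e.g. nodefs.available < 10% of the filesystem).
func breached(t threshold, available, capacity int64) bool {
	limit := t.quantity
	if t.percentage > 0 {
		limit = int64(t.percentage * float64(capacity))
	}
	return available < limit
}

func main() {
	thresholds := []threshold{
		{signal: "memory.available", quantity: 100 << 20}, // 100Mi
		{signal: "nodefs.available", percentage: 0.1},     // 10%
		{signal: "imagefs.available", percentage: 0.15},   // 15%
	}
	// Hypothetical observations: {available, capacity} pairs in bytes.
	observed := map[string][2]int64{
		"memory.available":  {50 << 20, 32 << 30},
		"nodefs.available":  {20 << 30, 80 << 30},
		"imagefs.available": {20 << 30, 80 << 30},
	}
	for _, t := range thresholds {
		o := observed[t.signal]
		fmt.Printf("%s breached=%v\n", t.signal, breached(t, o[0], o[1]))
	}
}
```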
manager" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.930291 4784 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.930333 4784 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.930724 4784 state_mem.go:36] "Initialized new in-memory state store" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.931668 4784 server.go:1245] "Using root directory" path="/var/lib/kubelet" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.936283 4784 kubelet.go:418] "Attempting to sync node with API server" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.936338 4784 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.936370 4784 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.936390 4784 kubelet.go:324] "Adding apiserver pod source" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.936403 4784 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.938611 4784 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.939047 4784 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.940269 4784 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.940336 4784 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.223:6443: connect: connection refused Dec 05 12:25:20 crc kubenswrapper[4784]: E1205 12:25:20.940424 4784 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.223:6443: connect: connection refused" logger="UnhandledError" Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.940676 4784 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.223:6443: connect: connection refused Dec 05 12:25:20 crc kubenswrapper[4784]: E1205 12:25:20.940738 4784 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.223:6443: connect: connection refused" logger="UnhandledError" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.940878 4784 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.940900 4784 plugins.go:603] 
"Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.940907 4784 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.940915 4784 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.940924 4784 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.940935 4784 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.940942 4784 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.940954 4784 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.940962 4784 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.940969 4784 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.940979 4784 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.940985 4784 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.942609 4784 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.943152 4784 server.go:1280] "Started kubelet" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.943360 4784 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.223:6443: connect: connection refused Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.944097 4784 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.944121 4784 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.944709 4784 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Dec 05 12:25:20 crc systemd[1]: Started Kubernetes Kubelet. 
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.946693 4784 server.go:460] "Adding debug handlers to kubelet server"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.947167 4784 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.947241 4784 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.948076 4784 volume_manager.go:287] "The desired_state_of_world populator starts"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.948107 4784 volume_manager.go:289] "Starting Kubelet Volume Manager"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.948144 4784 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-25 20:05:05.334543208 +0000 UTC
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.948227 4784 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 487h39m44.386324105s for next certificate rotation
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.948444 4784 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Dec 05 12:25:20 crc kubenswrapper[4784]: E1205 12:25:20.948687 4784 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.949484 4784 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.949519 4784 factory.go:55] Registering systemd factory
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.949530 4784 factory.go:221] Registration of the systemd container factory successfully
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.949892 4784 factory.go:153] Registering CRI-O factory
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.949983 4784 factory.go:221] Registration of the crio container factory successfully
Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.949881 4784 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.223:6443: connect: connection refused
Dec 05 12:25:20 crc kubenswrapper[4784]: E1205 12:25:20.950086 4784 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.223:6443: connect: connection refused" logger="UnhandledError"
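[editor's note] The kubelet-serving pair above shows how rotation is scheduled: the certificate expires 2026-02-24, but the manager picks a deadline of 2025-12-25 and sleeps ~487h until then, i.e. rotation is planned at a jittered point late in the cert's lifetime, well before expiry. A sketch of that scheduling; the 0.7-0.9 jitter window is an assumption for illustration, since the exact upstream fraction is not visible in the log:

```go
package main

import (
	"fmt"
	"math/rand"
	"time"
)

// nextRotationDeadline picks a deadline at a random point late in the
// certificate's validity window, then the caller sleeps until it.
// ASSUMPTION: the [0.7, 0.9) fraction range is illustrative only.
func nextRotationDeadline(notBefore, notAfter time.Time) time.Time {
	total := notAfter.Sub(notBefore)
	jitter := 0.7 + 0.2*rand.Float64() // random point in [0.7, 0.9)
	return notBefore.Add(time.Duration(jitter * float64(total)))
}

func main() {
	// Hypothetical one-year serving cert ending at the expiry the log shows.
	notBefore := time.Date(2025, 2, 24, 5, 53, 3, 0, time.UTC)
	notAfter := time.Date(2026, 2, 24, 5, 53, 3, 0, time.UTC)
	deadline := nextRotationDeadline(notBefore, notAfter)
	fmt.Printf("Certificate expiration is %s, rotation deadline is %s\n", notAfter, deadline)
	fmt.Printf("Waiting %s for next certificate rotation\n", time.Until(deadline).Round(time.Second))
}
```

Rotating early with jitter avoids a fleet of nodes hammering the CA at the same instant and leaves headroom if the signer is briefly unavailable.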
Dec 05 12:25:20 crc kubenswrapper[4784]: E1205 12:25:20.947748 4784 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.223:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187e51530292da4c default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 12:25:20.943127116 +0000 UTC m=+0.363193951,LastTimestamp:2025-12-05 12:25:20.943127116 +0000 UTC m=+0.363193951,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.950214 4784 factory.go:103] Registering Raw factory
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.950259 4784 manager.go:1196] Started watching for new ooms in manager
Dec 05 12:25:20 crc kubenswrapper[4784]: E1205 12:25:20.952138 4784 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.223:6443: connect: connection refused" interval="200ms"
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.954886 4784 manager.go:319] Starting recovery of all containers
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.962301 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext=""
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.962357 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext=""
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.962371 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext=""
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.962381 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext=""
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.962392 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext=""
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.962401 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext=""
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.962410 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext=""
Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.962419 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config"
seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.962430 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.962440 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.962449 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.962459 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.962469 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.962479 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.962503 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.962512 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.962521 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.962532 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.962543 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 
12:25:20.962555 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.962566 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.962576 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.962587 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.962618 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.962633 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.962647 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964295 4784 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964324 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964341 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964353 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" 
volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964387 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964415 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964423 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964433 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964447 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964461 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964469 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964479 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964489 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964497 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964506 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" 
volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964538 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964554 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964562 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964571 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964591 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964605 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964614 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964621 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964630 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964640 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964649 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964657 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964669 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964679 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964688 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964697 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964712 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964721 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964730 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964739 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964748 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964759 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964770 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964780 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964790 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964801 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964812 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964824 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964836 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964847 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964859 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964870 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964882 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" 
volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964894 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964904 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964915 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964925 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964934 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964943 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964951 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964959 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964968 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.964976 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965005 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" 
volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965014 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965022 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965031 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965040 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965049 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965057 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965065 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965073 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965081 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965089 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965097 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" 
volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965105 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965115 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965124 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965133 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965140 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965150 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965157 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965165 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965179 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965211 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965221 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" 
volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965230 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965239 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965249 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965258 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965268 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965278 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965287 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965297 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965306 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965314 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965324 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" 
volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965332 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965341 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965349 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965360 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965370 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965379 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965392 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965401 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965409 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965420 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965429 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965462 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965473 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965487 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965495 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965509 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965517 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965528 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965537 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965547 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965557 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965565 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" 
volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965574 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965583 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965592 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965602 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965613 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965623 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965632 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965648 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965657 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965672 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965687 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" 
volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965696 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965709 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965720 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965730 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965743 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965752 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965763 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965818 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965828 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965839 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965848 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" 
volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965859 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965872 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965880 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965889 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965897 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965906 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965916 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965925 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965934 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965943 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965951 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" 
volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965960 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965968 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965976 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965985 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.965993 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.966001 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.966010 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.966019 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.966027 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.966035 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.966045 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.966054 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.966065 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.966074 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.966084 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.966093 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.966102 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.966116 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.966124 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.966133 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.966145 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.966158 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" 
seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.966170 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.966181 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.966208 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.966222 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.966234 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.966246 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.966258 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.966268 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.966276 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.966284 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.966292 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" 
volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.966300 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.966307 4784 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.966316 4784 reconstruct.go:97] "Volume reconstruction finished" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.966323 4784 reconciler.go:26] "Reconciler: start to sync state" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.980084 4784 manager.go:324] Recovery completed Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.989928 4784 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.992863 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.992906 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.992916 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.993735 4784 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.993768 4784 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.993804 4784 state_mem.go:36] "Initialized new in-memory state store" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.995842 4784 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.997550 4784 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.997596 4784 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 05 12:25:20 crc kubenswrapper[4784]: I1205 12:25:20.997625 4784 kubelet.go:2335] "Starting kubelet main sync loop" Dec 05 12:25:20 crc kubenswrapper[4784]: E1205 12:25:20.997670 4784 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 05 12:25:20 crc kubenswrapper[4784]: W1205 12:25:20.998447 4784 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.223:6443: connect: connection refused Dec 05 12:25:20 crc kubenswrapper[4784]: E1205 12:25:20.998521 4784 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.223:6443: connect: connection refused" logger="UnhandledError" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.007779 4784 policy_none.go:49] "None policy: Start" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.008740 4784 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.008775 4784 state_mem.go:35] "Initializing new in-memory state store" Dec 05 12:25:21 crc kubenswrapper[4784]: E1205 12:25:21.049238 4784 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.058345 4784 manager.go:334] "Starting Device Plugin manager" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.058415 4784 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.058429 4784 server.go:79] "Starting device plugin registration server" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.058881 4784 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.058932 4784 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.059136 4784 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.059454 4784 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.059472 4784 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 05 12:25:21 crc kubenswrapper[4784]: E1205 12:25:21.065400 4784 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.097990 4784 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 12:25:21 crc kubenswrapper[4784]: 
I1205 12:25:21.098120 4784 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.099300 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.099337 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.099347 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.099448 4784 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.099831 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.099909 4784 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.100164 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.100232 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.100245 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.100332 4784 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.100573 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.100628 4784 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.101498 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.101548 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.101601 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.101725 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.101763 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.101773 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.101943 4784 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.102053 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.102091 4784 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.102383 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.102420 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.102430 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.102858 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.102883 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.102894 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.102903 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.102929 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.102940 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.103066 4784 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.103240 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.103291 4784 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.103792 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.103826 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.103838 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.104017 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.104050 4784 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.104052 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.104089 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.104103 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.105109 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.105149 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.105163 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:21 crc kubenswrapper[4784]: E1205 12:25:21.153021 4784 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.223:6443: connect: connection refused" interval="400ms" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.159070 4784 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.160423 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.160453 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.160464 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.160486 4784 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 12:25:21 crc kubenswrapper[4784]: E1205 12:25:21.160939 4784 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.223:6443: connect: connection refused" node="crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.169203 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.169248 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.169280 4784 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.169296 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.169313 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.169333 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.169356 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.169386 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.169514 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.169543 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.169572 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.169609 4784 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.169640 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.169668 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.169696 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.270837 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.270903 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.270926 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.270947 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.270970 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.270990 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: 
\"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.271011 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.271070 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.271052 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.271107 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.271153 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.271173 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.271180 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.271095 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.271182 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.271228 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.271246 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.271301 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.271316 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.271279 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.271435 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.271473 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.271499 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.271479 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.271544 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: 
I1205 12:25:21.271575 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.271602 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.271619 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.271643 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.271735 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.361768 4784 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.363233 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.363291 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.363301 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.363327 4784 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 12:25:21 crc kubenswrapper[4784]: E1205 12:25:21.364228 4784 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.223:6443: connect: connection refused" node="crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.431610 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.452558 4784 util.go:30] "No sandbox for pod can be found. 
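Editor's note: the entries above (and throughout this log) are journald lines ("Dec 05 12:25:21 crc kubenswrapper[4784]:") carrying klog's standard header: a severity letter (I/W/E/F), MMDD date, wall-clock time, PID, and source file:line, followed by the message. Below is a minimal Go sketch for splitting that header when post-processing this log; the regular expression and field labels are this note's own assumptions, not anything the kubelet ships.

// klogsplit.go - illustrative sketch (not part of this log) for parsing the
// klog header format seen above: Lmmdd hh:mm:ss.uuuuuu pid file.go:line] msg
package main

import (
	"fmt"
	"regexp"
)

// severity letter, MMDD, time with microseconds, PID, source location, message.
var klogHeader = regexp.MustCompile(
	`^([IWEF])(\d{4}) (\d{2}:\d{2}:\d{2}\.\d{6})\s+(\d+) ([\w./-]+:\d+)\] (.*)$`)

func main() {
	// Sample entry copied from this log.
	entry := `I1205 12:25:21.363327 4784 kubelet_node_status.go:76] "Attempting to register node" node="crc"`
	if m := klogHeader.FindStringSubmatch(entry); m != nil {
		fmt.Printf("severity=%s date=%s time=%s pid=%s source=%s msg=%s\n",
			m[1], m[2], m[3], m[4], m[5], m[6])
	}
}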
Dec 05 12:25:21 crc kubenswrapper[4784]: W1205 12:25:21.469713 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-f2fbb185c196a1f91077741993bd6b75f68c7d915aeec3b8a53dd4ddad5ee607 WatchSource:0}: Error finding container f2fbb185c196a1f91077741993bd6b75f68c7d915aeec3b8a53dd4ddad5ee607: Status 404 returned error can't find the container with id f2fbb185c196a1f91077741993bd6b75f68c7d915aeec3b8a53dd4ddad5ee607
Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.483160 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.500279 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc"
Dec 05 12:25:21 crc kubenswrapper[4784]: W1205 12:25:21.504456 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-754e461a18855b2cc1f2f42aa6972825509877c5edda9582b0106a4179e5c44d WatchSource:0}: Error finding container 754e461a18855b2cc1f2f42aa6972825509877c5edda9582b0106a4179e5c44d: Status 404 returned error can't find the container with id 754e461a18855b2cc1f2f42aa6972825509877c5edda9582b0106a4179e5c44d
Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.508219 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 12:25:21 crc kubenswrapper[4784]: W1205 12:25:21.513910 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-18afd3e75dd8b4051193318c21671095b9aeb812ff60fbf1a702cccf2b30eeb8 WatchSource:0}: Error finding container 18afd3e75dd8b4051193318c21671095b9aeb812ff60fbf1a702cccf2b30eeb8: Status 404 returned error can't find the container with id 18afd3e75dd8b4051193318c21671095b9aeb812ff60fbf1a702cccf2b30eeb8
Dec 05 12:25:21 crc kubenswrapper[4784]: W1205 12:25:21.527517 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-5b7f9a7f93194531857b0c64280abe25674a02a60583a4cd28c1a30fdfb980c5 WatchSource:0}: Error finding container 5b7f9a7f93194531857b0c64280abe25674a02a60583a4cd28c1a30fdfb980c5: Status 404 returned error can't find the container with id 5b7f9a7f93194531857b0c64280abe25674a02a60583a4cd28c1a30fdfb980c5
Dec 05 12:25:21 crc kubenswrapper[4784]: E1205 12:25:21.554645 4784 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.223:6443: connect: connection refused" interval="800ms"
Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.765230 4784 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.766650 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.766695 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
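Editor's note: the lease error above reports interval="800ms"; the retries later in this log show interval="1.6s" and then interval="3.2s", i.e. the kubelet doubles the retry interval after each consecutive failure. A small illustrative Go loop that mimics only that observed doubling; the cap is an assumption and this is not the kubelet's actual lease controller.

// leasebackoff.go - illustrative only: reproduces the doubling retry interval
// visible in the "Failed to ensure lease exists" entries (800ms -> 1.6s -> 3.2s).
package main

import (
	"fmt"
	"time"
)

func main() {
	interval := 800 * time.Millisecond
	const cap = 7 * time.Second // assumption: some upper bound applies
	for attempt := 1; attempt <= 4; attempt++ {
		fmt.Printf("attempt %d failed, will retry in %v\n", attempt, interval)
		if next := interval * 2; next <= cap {
			interval = next // double after each consecutive failure
		}
	}
}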
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.766712 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.766739 4784 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 12:25:21 crc kubenswrapper[4784]: E1205 12:25:21.767380 4784 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.223:6443: connect: connection refused" node="crc" Dec 05 12:25:21 crc kubenswrapper[4784]: W1205 12:25:21.921979 4784 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.223:6443: connect: connection refused Dec 05 12:25:21 crc kubenswrapper[4784]: E1205 12:25:21.922572 4784 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.223:6443: connect: connection refused" logger="UnhandledError" Dec 05 12:25:21 crc kubenswrapper[4784]: I1205 12:25:21.945157 4784 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.223:6443: connect: connection refused Dec 05 12:25:22 crc kubenswrapper[4784]: I1205 12:25:22.002206 4784 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="906b2cfdbacb9adc9b4cac9f3f1845c45fbaf3a0871e50cf3725dc9bebd85631" exitCode=0 Dec 05 12:25:22 crc kubenswrapper[4784]: I1205 12:25:22.002290 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"906b2cfdbacb9adc9b4cac9f3f1845c45fbaf3a0871e50cf3725dc9bebd85631"} Dec 05 12:25:22 crc kubenswrapper[4784]: I1205 12:25:22.002395 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"754e461a18855b2cc1f2f42aa6972825509877c5edda9582b0106a4179e5c44d"} Dec 05 12:25:22 crc kubenswrapper[4784]: I1205 12:25:22.002502 4784 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:25:22 crc kubenswrapper[4784]: I1205 12:25:22.003515 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:22 crc kubenswrapper[4784]: I1205 12:25:22.003551 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:22 crc kubenswrapper[4784]: I1205 12:25:22.003560 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:22 crc kubenswrapper[4784]: I1205 12:25:22.004123 4784 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="fd6a4685a8454beb00ca78c095040c255785f695c1645ce1142a835f8d01fac9" exitCode=0 Dec 
Dec 05 12:25:22 crc kubenswrapper[4784]: I1205 12:25:22.004174 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"fd6a4685a8454beb00ca78c095040c255785f695c1645ce1142a835f8d01fac9"}
Dec 05 12:25:22 crc kubenswrapper[4784]: I1205 12:25:22.004218 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"b6474f421a4ed492634329b86537617d82e6ea52a536bbddaebab42bc753b3f3"}
Dec 05 12:25:22 crc kubenswrapper[4784]: I1205 12:25:22.004285 4784 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 12:25:22 crc kubenswrapper[4784]: I1205 12:25:22.004916 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:25:22 crc kubenswrapper[4784]: I1205 12:25:22.004936 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:25:22 crc kubenswrapper[4784]: I1205 12:25:22.004958 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:25:22 crc kubenswrapper[4784]: I1205 12:25:22.006267 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219"}
Dec 05 12:25:22 crc kubenswrapper[4784]: I1205 12:25:22.006313 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"f2fbb185c196a1f91077741993bd6b75f68c7d915aeec3b8a53dd4ddad5ee607"}
Dec 05 12:25:22 crc kubenswrapper[4784]: I1205 12:25:22.007923 4784 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72" exitCode=0
Dec 05 12:25:22 crc kubenswrapper[4784]: I1205 12:25:22.007961 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72"}
Dec 05 12:25:22 crc kubenswrapper[4784]: I1205 12:25:22.007998 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"5b7f9a7f93194531857b0c64280abe25674a02a60583a4cd28c1a30fdfb980c5"}
Dec 05 12:25:22 crc kubenswrapper[4784]: I1205 12:25:22.008120 4784 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 12:25:22 crc kubenswrapper[4784]: I1205 12:25:22.009001 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:25:22 crc kubenswrapper[4784]: I1205 12:25:22.009017 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:25:22 crc kubenswrapper[4784]: I1205 12:25:22.009025 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:25:22 crc kubenswrapper[4784]: I1205 12:25:22.009721 4784 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="6e8ab135b307e02a11d6a94e5a97c9e823bdd240f124cc9ad951b74a77f9919b" exitCode=0
Dec 05 12:25:22 crc kubenswrapper[4784]: I1205 12:25:22.009755 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"6e8ab135b307e02a11d6a94e5a97c9e823bdd240f124cc9ad951b74a77f9919b"}
Dec 05 12:25:22 crc kubenswrapper[4784]: I1205 12:25:22.009772 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"18afd3e75dd8b4051193318c21671095b9aeb812ff60fbf1a702cccf2b30eeb8"}
Dec 05 12:25:22 crc kubenswrapper[4784]: I1205 12:25:22.009867 4784 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 12:25:22 crc kubenswrapper[4784]: I1205 12:25:22.010465 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:25:22 crc kubenswrapper[4784]: I1205 12:25:22.010484 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:25:22 crc kubenswrapper[4784]: I1205 12:25:22.010493 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:25:22 crc kubenswrapper[4784]: I1205 12:25:22.011563 4784 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 12:25:22 crc kubenswrapper[4784]: I1205 12:25:22.012390 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:25:22 crc kubenswrapper[4784]: I1205 12:25:22.012424 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:25:22 crc kubenswrapper[4784]: I1205 12:25:22.012455 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:25:22 crc kubenswrapper[4784]: W1205 12:25:22.254044 4784 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.223:6443: connect: connection refused
Dec 05 12:25:22 crc kubenswrapper[4784]: E1205 12:25:22.254150 4784 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.223:6443: connect: connection refused" logger="UnhandledError"
Dec 05 12:25:22 crc kubenswrapper[4784]: E1205 12:25:22.356158 4784 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.223:6443: connect: connection refused" interval="1.6s"
Dec 05 12:25:22 crc kubenswrapper[4784]: W1205 12:25:22.378176 4784 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.223:6443: connect: connection refused
Dec 05 12:25:22 crc kubenswrapper[4784]: E1205 12:25:22.378274 4784 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.223:6443: connect: connection refused" logger="UnhandledError"
Dec 05 12:25:22 crc kubenswrapper[4784]: W1205 12:25:22.539620 4784 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.223:6443: connect: connection refused
Dec 05 12:25:22 crc kubenswrapper[4784]: E1205 12:25:22.539751 4784 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.223:6443: connect: connection refused" logger="UnhandledError"
Dec 05 12:25:22 crc kubenswrapper[4784]: I1205 12:25:22.567567 4784 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 12:25:22 crc kubenswrapper[4784]: I1205 12:25:22.569316 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:25:22 crc kubenswrapper[4784]: I1205 12:25:22.569369 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:25:22 crc kubenswrapper[4784]: I1205 12:25:22.569381 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:25:22 crc kubenswrapper[4784]: I1205 12:25:22.569412 4784 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Dec 05 12:25:22 crc kubenswrapper[4784]: E1205 12:25:22.570016 4784 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.223:6443: connect: connection refused" node="crc"
Dec 05 12:25:22 crc kubenswrapper[4784]: I1205 12:25:22.944140 4784 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.223:6443: connect: connection refused
Dec 05 12:25:23 crc kubenswrapper[4784]: I1205 12:25:23.018410 4784 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="2ab829abdf9d6a220dee205b362ae1a90cc650a7855a78bed6762d401672bda7" exitCode=0
Dec 05 12:25:23 crc kubenswrapper[4784]: I1205 12:25:23.018496 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"2ab829abdf9d6a220dee205b362ae1a90cc650a7855a78bed6762d401672bda7"}
Dec 05 12:25:23 crc kubenswrapper[4784]: I1205 12:25:23.018680 4784 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 12:25:23 crc kubenswrapper[4784]: I1205 12:25:23.020506 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:25:23 crc kubenswrapper[4784]: I1205 12:25:23.020546 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:23 crc kubenswrapper[4784]: I1205 12:25:23.020557 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:23 crc kubenswrapper[4784]: I1205 12:25:23.021742 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"d17f8c60696435050e9a5da93f54ee6a49b87142b36eb925b68197d10b3a935d"} Dec 05 12:25:23 crc kubenswrapper[4784]: I1205 12:25:23.021906 4784 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:25:23 crc kubenswrapper[4784]: I1205 12:25:23.023494 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:23 crc kubenswrapper[4784]: I1205 12:25:23.023542 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:23 crc kubenswrapper[4784]: I1205 12:25:23.023554 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:23 crc kubenswrapper[4784]: I1205 12:25:23.025941 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"a248634eb57b9ae01ffea846c57482ea060fcbd6281c5404bf96ab0706ee3f2b"} Dec 05 12:25:23 crc kubenswrapper[4784]: I1205 12:25:23.025994 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"6534b0ccbe3164a1bab663c746b201b5aef84ca859991c89cc3a7a84693f24a0"} Dec 05 12:25:23 crc kubenswrapper[4784]: I1205 12:25:23.026008 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"c3edce4b8b954f99066449c8dd5566695f300fc65c85ba3c90827ea380c87cf3"} Dec 05 12:25:23 crc kubenswrapper[4784]: I1205 12:25:23.026119 4784 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:25:23 crc kubenswrapper[4784]: I1205 12:25:23.027213 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:23 crc kubenswrapper[4784]: I1205 12:25:23.027251 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:23 crc kubenswrapper[4784]: I1205 12:25:23.027260 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:23 crc kubenswrapper[4784]: I1205 12:25:23.031310 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"125aee8151f99b21dca57ec7d237806d4cf8101cf97216b38dda10d47e43bf10"} Dec 05 12:25:23 crc kubenswrapper[4784]: I1205 12:25:23.031359 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889"} Dec 05 12:25:23 crc kubenswrapper[4784]: I1205 12:25:23.031370 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab"} Dec 05 12:25:23 crc kubenswrapper[4784]: I1205 12:25:23.031391 4784 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:25:23 crc kubenswrapper[4784]: I1205 12:25:23.036245 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:23 crc kubenswrapper[4784]: I1205 12:25:23.036287 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:23 crc kubenswrapper[4784]: I1205 12:25:23.036296 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:23 crc kubenswrapper[4784]: I1205 12:25:23.045085 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505"} Dec 05 12:25:23 crc kubenswrapper[4784]: I1205 12:25:23.045142 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13"} Dec 05 12:25:23 crc kubenswrapper[4784]: I1205 12:25:23.045156 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89"} Dec 05 12:25:23 crc kubenswrapper[4784]: I1205 12:25:23.045166 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7"} Dec 05 12:25:24 crc kubenswrapper[4784]: I1205 12:25:24.050718 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f"} Dec 05 12:25:24 crc kubenswrapper[4784]: I1205 12:25:24.050828 4784 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:25:24 crc kubenswrapper[4784]: I1205 12:25:24.051772 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:24 crc kubenswrapper[4784]: I1205 12:25:24.051804 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:24 crc kubenswrapper[4784]: I1205 12:25:24.051814 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:24 crc kubenswrapper[4784]: I1205 12:25:24.052922 4784 generic.go:334] "Generic (PLEG): container finished" 
podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="30cf54e462d22893043de62ee5ec395d39d33625bd6d7a8baf622785008e926a" exitCode=0 Dec 05 12:25:24 crc kubenswrapper[4784]: I1205 12:25:24.052991 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"30cf54e462d22893043de62ee5ec395d39d33625bd6d7a8baf622785008e926a"} Dec 05 12:25:24 crc kubenswrapper[4784]: I1205 12:25:24.053064 4784 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:25:24 crc kubenswrapper[4784]: I1205 12:25:24.053112 4784 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:25:24 crc kubenswrapper[4784]: I1205 12:25:24.053971 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:24 crc kubenswrapper[4784]: I1205 12:25:24.053995 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:24 crc kubenswrapper[4784]: I1205 12:25:24.054004 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:24 crc kubenswrapper[4784]: I1205 12:25:24.054049 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:24 crc kubenswrapper[4784]: I1205 12:25:24.054078 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:24 crc kubenswrapper[4784]: I1205 12:25:24.054106 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:24 crc kubenswrapper[4784]: I1205 12:25:24.170749 4784 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:25:24 crc kubenswrapper[4784]: I1205 12:25:24.172237 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:24 crc kubenswrapper[4784]: I1205 12:25:24.172274 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:24 crc kubenswrapper[4784]: I1205 12:25:24.172286 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:24 crc kubenswrapper[4784]: I1205 12:25:24.172312 4784 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 12:25:25 crc kubenswrapper[4784]: I1205 12:25:25.063878 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"da53a04acf13696c1b3115201b15c2ffcfeea4a5088934d447e35d4a7cc3363a"} Dec 05 12:25:25 crc kubenswrapper[4784]: I1205 12:25:25.063944 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"b1fc823254ee849823e0b74145a7db442068b79310e3e579649d68cd2ccd9ee0"} Dec 05 12:25:25 crc kubenswrapper[4784]: I1205 12:25:25.063951 4784 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 12:25:25 crc kubenswrapper[4784]: I1205 12:25:25.064025 4784 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:25:25 crc kubenswrapper[4784]: I1205 
Dec 05 12:25:25 crc kubenswrapper[4784]: I1205 12:25:25.063962 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"4b8cb8d156930f9a574c9e3fa4ed3ab73ac533dc3db5f84474184a4c4d9ebe59"}
Dec 05 12:25:25 crc kubenswrapper[4784]: I1205 12:25:25.064990 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:25:25 crc kubenswrapper[4784]: I1205 12:25:25.065035 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:25:25 crc kubenswrapper[4784]: I1205 12:25:25.065052 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:25:25 crc kubenswrapper[4784]: I1205 12:25:25.077638 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 12:25:25 crc kubenswrapper[4784]: I1205 12:25:25.077743 4784 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 12:25:25 crc kubenswrapper[4784]: I1205 12:25:25.078799 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:25:25 crc kubenswrapper[4784]: I1205 12:25:25.078879 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:25:25 crc kubenswrapper[4784]: I1205 12:25:25.078895 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:25:25 crc kubenswrapper[4784]: I1205 12:25:25.122092 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 12:25:25 crc kubenswrapper[4784]: I1205 12:25:25.380026 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 12:25:25 crc kubenswrapper[4784]: I1205 12:25:25.874403 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 12:25:26 crc kubenswrapper[4784]: I1205 12:25:26.071402 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"f4d79e079b19cd0eff65dffe4dad9ca1dfe85e81470dbdab91c9ad7022bdc14c"}
Dec 05 12:25:26 crc kubenswrapper[4784]: I1205 12:25:26.071470 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"a1884f5a57577aa76ca436900539863ed47c67fa8ed2408a0d78127223dc4de8"}
Dec 05 12:25:26 crc kubenswrapper[4784]: I1205 12:25:26.071511 4784 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 12:25:26 crc kubenswrapper[4784]: I1205 12:25:26.071514 4784 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 12:25:26 crc kubenswrapper[4784]: I1205 12:25:26.071619 4784 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 12:25:26 crc kubenswrapper[4784]: I1205 12:25:26.072722 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:25:26 crc kubenswrapper[4784]: I1205 12:25:26.072769 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
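Editor's note: the "SyncLoop (probe)" entries above summarize startup/readiness probe transitions, and later entries in this log show the underlying HTTPS GETs with failure strings such as "connect: connection refused" and "Client.Timeout exceeded while awaiting headers". Below is a minimal Go approximation of such a check against an endpoint taken from this log; it is not the kubelet's prober, and the 1s timeout and TLS handling are assumptions.

// probecheck.go - illustrative approximation of an HTTPS health probe.
package main

import (
	"crypto/tls"
	"fmt"
	"net/http"
	"time"
)

func main() {
	client := &http.Client{
		// When exceeded, Go reports "Client.Timeout exceeded while awaiting headers".
		Timeout: 1 * time.Second,
		Transport: &http.Transport{
			// Skipping verification here only to keep the sketch self-contained.
			TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
		},
	}
	resp, err := client.Get("https://192.168.126.11:10357/healthz")
	if err != nil {
		fmt.Println("probe failed:", err) // e.g. "connect: connection refused"
		return
	}
	defer resp.Body.Close()
	fmt.Println("probe status:", resp.StatusCode) // a non-2xx code also counts as failure
}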
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:26 crc kubenswrapper[4784]: I1205 12:25:26.072784 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:26 crc kubenswrapper[4784]: I1205 12:25:26.072727 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:26 crc kubenswrapper[4784]: I1205 12:25:26.072835 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:26 crc kubenswrapper[4784]: I1205 12:25:26.072860 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:26 crc kubenswrapper[4784]: I1205 12:25:26.072973 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:26 crc kubenswrapper[4784]: I1205 12:25:26.073001 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:26 crc kubenswrapper[4784]: I1205 12:25:26.073015 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:26 crc kubenswrapper[4784]: I1205 12:25:26.344815 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 12:25:26 crc kubenswrapper[4784]: I1205 12:25:26.344996 4784 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:25:26 crc kubenswrapper[4784]: I1205 12:25:26.346235 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:26 crc kubenswrapper[4784]: I1205 12:25:26.346283 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:26 crc kubenswrapper[4784]: I1205 12:25:26.346295 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:26 crc kubenswrapper[4784]: I1205 12:25:26.540015 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 12:25:26 crc kubenswrapper[4784]: I1205 12:25:26.919068 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Dec 05 12:25:27 crc kubenswrapper[4784]: I1205 12:25:27.073507 4784 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:25:27 crc kubenswrapper[4784]: I1205 12:25:27.073606 4784 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:25:27 crc kubenswrapper[4784]: I1205 12:25:27.073793 4784 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:25:27 crc kubenswrapper[4784]: I1205 12:25:27.075084 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:27 crc kubenswrapper[4784]: I1205 12:25:27.075221 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:27 crc kubenswrapper[4784]: I1205 12:25:27.075264 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 12:25:27 crc kubenswrapper[4784]: I1205 12:25:27.075304 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:27 crc kubenswrapper[4784]: I1205 12:25:27.075314 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:27 crc kubenswrapper[4784]: I1205 12:25:27.075373 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:27 crc kubenswrapper[4784]: I1205 12:25:27.075382 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:27 crc kubenswrapper[4784]: I1205 12:25:27.075402 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:27 crc kubenswrapper[4784]: I1205 12:25:27.075418 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:27 crc kubenswrapper[4784]: I1205 12:25:27.961765 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 12:25:28 crc kubenswrapper[4784]: I1205 12:25:28.076271 4784 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:25:28 crc kubenswrapper[4784]: I1205 12:25:28.076271 4784 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:25:28 crc kubenswrapper[4784]: I1205 12:25:28.078501 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:28 crc kubenswrapper[4784]: I1205 12:25:28.078534 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:28 crc kubenswrapper[4784]: I1205 12:25:28.078545 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:28 crc kubenswrapper[4784]: I1205 12:25:28.078625 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:28 crc kubenswrapper[4784]: I1205 12:25:28.078673 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:28 crc kubenswrapper[4784]: I1205 12:25:28.078688 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:28 crc kubenswrapper[4784]: I1205 12:25:28.797094 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Dec 05 12:25:28 crc kubenswrapper[4784]: I1205 12:25:28.857675 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:25:28 crc kubenswrapper[4784]: I1205 12:25:28.857947 4784 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:25:28 crc kubenswrapper[4784]: I1205 12:25:28.859268 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:28 crc kubenswrapper[4784]: I1205 12:25:28.859314 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:28 crc kubenswrapper[4784]: I1205 12:25:28.859334 
Dec 05 12:25:28 crc kubenswrapper[4784]: I1205 12:25:28.870016 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 12:25:29 crc kubenswrapper[4784]: I1205 12:25:29.078588 4784 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 12:25:29 crc kubenswrapper[4784]: I1205 12:25:29.078593 4784 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 12:25:29 crc kubenswrapper[4784]: I1205 12:25:29.080017 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:25:29 crc kubenswrapper[4784]: I1205 12:25:29.080017 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:25:29 crc kubenswrapper[4784]: I1205 12:25:29.080083 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:25:29 crc kubenswrapper[4784]: I1205 12:25:29.080099 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:25:29 crc kubenswrapper[4784]: I1205 12:25:29.080058 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:25:29 crc kubenswrapper[4784]: I1205 12:25:29.080136 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:25:30 crc kubenswrapper[4784]: I1205 12:25:30.962388 4784 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Dec 05 12:25:30 crc kubenswrapper[4784]: I1205 12:25:30.962458 4784 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Dec 05 12:25:31 crc kubenswrapper[4784]: E1205 12:25:31.065556 4784 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Dec 05 12:25:33 crc kubenswrapper[4784]: I1205 12:25:33.427281 4784 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Dec 05 12:25:33 crc kubenswrapper[4784]: I1205 12:25:33.427378 4784 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Dec 05 12:25:33 crc kubenswrapper[4784]: I1205 12:25:33.865643 4784 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok
Dec 05 12:25:33 crc kubenswrapper[4784]: [+]log ok
Dec 05 12:25:33 crc kubenswrapper[4784]: [+]etcd ok
Dec 05 12:25:33 crc kubenswrapper[4784]: [+]poststarthook/openshift.io-api-request-count-filter ok
Dec 05 12:25:33 crc kubenswrapper[4784]: [+]poststarthook/openshift.io-startkubeinformers ok
Dec 05 12:25:33 crc kubenswrapper[4784]: [+]poststarthook/openshift.io-openshift-apiserver-reachable ok
Dec 05 12:25:33 crc kubenswrapper[4784]: [+]poststarthook/openshift.io-oauth-apiserver-reachable ok
Dec 05 12:25:33 crc kubenswrapper[4784]: [+]poststarthook/start-apiserver-admission-initializer ok
Dec 05 12:25:33 crc kubenswrapper[4784]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok
Dec 05 12:25:33 crc kubenswrapper[4784]: [+]poststarthook/generic-apiserver-start-informers ok
Dec 05 12:25:33 crc kubenswrapper[4784]: [+]poststarthook/priority-and-fairness-config-consumer ok
Dec 05 12:25:33 crc kubenswrapper[4784]: [+]poststarthook/priority-and-fairness-filter ok
Dec 05 12:25:33 crc kubenswrapper[4784]: [+]poststarthook/storage-object-count-tracker-hook ok
Dec 05 12:25:33 crc kubenswrapper[4784]: [+]poststarthook/start-apiextensions-informers ok
Dec 05 12:25:33 crc kubenswrapper[4784]: [+]poststarthook/start-apiextensions-controllers ok
Dec 05 12:25:33 crc kubenswrapper[4784]: [+]poststarthook/crd-informer-synced ok
Dec 05 12:25:33 crc kubenswrapper[4784]: [+]poststarthook/start-system-namespaces-controller ok
Dec 05 12:25:33 crc kubenswrapper[4784]: [+]poststarthook/start-cluster-authentication-info-controller ok
Dec 05 12:25:33 crc kubenswrapper[4784]: [+]poststarthook/start-kube-apiserver-identity-lease-controller ok
Dec 05 12:25:33 crc kubenswrapper[4784]: [+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
Dec 05 12:25:33 crc kubenswrapper[4784]: [+]poststarthook/start-legacy-token-tracking-controller ok
Dec 05 12:25:33 crc kubenswrapper[4784]: [+]poststarthook/start-service-ip-repair-controllers ok
Dec 05 12:25:33 crc kubenswrapper[4784]: [-]poststarthook/rbac/bootstrap-roles failed: reason withheld
Dec 05 12:25:33 crc kubenswrapper[4784]: [-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
Dec 05 12:25:33 crc kubenswrapper[4784]: [+]poststarthook/priority-and-fairness-config-producer ok
Dec 05 12:25:33 crc kubenswrapper[4784]: [+]poststarthook/bootstrap-controller ok
Dec 05 12:25:33 crc kubenswrapper[4784]: [+]poststarthook/aggregator-reload-proxy-client-cert ok
Dec 05 12:25:33 crc kubenswrapper[4784]: [+]poststarthook/start-kube-aggregator-informers ok
Dec 05 12:25:33 crc kubenswrapper[4784]: [+]poststarthook/apiservice-status-local-available-controller ok
Dec 05 12:25:33 crc kubenswrapper[4784]: [+]poststarthook/apiservice-status-remote-available-controller ok
Dec 05 12:25:33 crc kubenswrapper[4784]: [+]poststarthook/apiservice-registration-controller ok
Dec 05 12:25:33 crc kubenswrapper[4784]: [+]poststarthook/apiservice-wait-for-first-sync ok
Dec 05 12:25:33 crc kubenswrapper[4784]: [+]poststarthook/apiservice-discovery-controller ok
Dec 05 12:25:33 crc kubenswrapper[4784]: [+]poststarthook/kube-apiserver-autoregistration ok
Dec 05 12:25:33 crc kubenswrapper[4784]: [+]autoregister-completion ok
Dec 05 12:25:33 crc kubenswrapper[4784]: [+]poststarthook/apiservice-openapi-controller ok
Dec 05 12:25:33 crc kubenswrapper[4784]: [+]poststarthook/apiservice-openapiv3-controller ok
Dec 05 12:25:33 crc kubenswrapper[4784]: livez check failed
Dec 05 12:25:33 crc kubenswrapper[4784]: I1205 12:25:33.865768 4784 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 05 12:25:35 crc kubenswrapper[4784]: I1205 12:25:35.380865 4784 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Dec 05 12:25:35 crc kubenswrapper[4784]: I1205 12:25:35.381005 4784 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
Dec 05 12:25:36 crc kubenswrapper[4784]: I1205 12:25:36.158569 4784 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Dec 05 12:25:36 crc kubenswrapper[4784]: I1205 12:25:36.159071 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
Dec 05 12:25:36 crc kubenswrapper[4784]: I1205 12:25:36.544915 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 12:25:36 crc kubenswrapper[4784]: I1205 12:25:36.545060 4784 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 12:25:36 crc kubenswrapper[4784]: I1205 12:25:36.546654 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:25:36 crc kubenswrapper[4784]: I1205 12:25:36.546686 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:25:36 crc kubenswrapper[4784]: I1205 12:25:36.546695 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:25:36 crc kubenswrapper[4784]: I1205 12:25:36.951875 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc"
Dec 05 12:25:36 crc kubenswrapper[4784]: I1205 12:25:36.952070 4784 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 12:25:36 crc kubenswrapper[4784]: I1205 12:25:36.953246 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:25:36 crc kubenswrapper[4784]: I1205 12:25:36.953304 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
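Editor's note: the probe body quoted above is the API server's verbose /livez output: "[+]" marks a passing check, "[-]" a failing one (with "reason withheld" for callers not authorized to see details), and the final line carries the verdict. A small Go sketch that extracts the failing checks from such a body; the input is abridged from this log.

// livezscan.go - lists the failing checks in a verbose /livez response body.
package main

import (
	"bufio"
	"fmt"
	"strings"
)

func main() {
	// Abridged from the probe output recorded above.
	body := `[+]ping ok
[+]etcd ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
livez check failed`

	sc := bufio.NewScanner(strings.NewReader(body))
	for sc.Scan() {
		if line := sc.Text(); strings.HasPrefix(line, "[-]") {
			fmt.Println("failing:", strings.TrimPrefix(line, "[-]"))
		}
	}
}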
event="NodeHasNoDiskPressure" Dec 05 12:25:36 crc kubenswrapper[4784]: I1205 12:25:36.953333 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:36 crc kubenswrapper[4784]: I1205 12:25:36.962998 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 05 12:25:37 crc kubenswrapper[4784]: I1205 12:25:37.103896 4784 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:25:37 crc kubenswrapper[4784]: I1205 12:25:37.104622 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:37 crc kubenswrapper[4784]: I1205 12:25:37.105262 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:37 crc kubenswrapper[4784]: I1205 12:25:37.105285 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:38 crc kubenswrapper[4784]: E1205 12:25:38.406024 4784 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="3.2s" Dec 05 12:25:38 crc kubenswrapper[4784]: I1205 12:25:38.407806 4784 trace.go:236] Trace[1860772386]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 12:25:24.694) (total time: 13713ms): Dec 05 12:25:38 crc kubenswrapper[4784]: Trace[1860772386]: ---"Objects listed" error: 13713ms (12:25:38.407) Dec 05 12:25:38 crc kubenswrapper[4784]: Trace[1860772386]: [13.713259339s] [13.713259339s] END Dec 05 12:25:38 crc kubenswrapper[4784]: I1205 12:25:38.407837 4784 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 05 12:25:38 crc kubenswrapper[4784]: I1205 12:25:38.408417 4784 trace.go:236] Trace[120035150]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 12:25:24.110) (total time: 14297ms): Dec 05 12:25:38 crc kubenswrapper[4784]: Trace[120035150]: ---"Objects listed" error: 14297ms (12:25:38.408) Dec 05 12:25:38 crc kubenswrapper[4784]: Trace[120035150]: [14.297559068s] [14.297559068s] END Dec 05 12:25:38 crc kubenswrapper[4784]: I1205 12:25:38.408471 4784 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 05 12:25:38 crc kubenswrapper[4784]: E1205 12:25:38.412249 4784 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Dec 05 12:25:38 crc kubenswrapper[4784]: I1205 12:25:38.421360 4784 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Dec 05 12:25:38 crc kubenswrapper[4784]: I1205 12:25:38.421395 4784 trace.go:236] Trace[856649779]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 12:25:25.540) (total time: 12880ms): Dec 05 12:25:38 crc kubenswrapper[4784]: Trace[856649779]: ---"Objects listed" error: 12880ms (12:25:38.421) Dec 05 12:25:38 crc kubenswrapper[4784]: Trace[856649779]: [12.880318405s] [12.880318405s] END Dec 05 12:25:38 crc kubenswrapper[4784]: I1205 12:25:38.421642 4784 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 05 12:25:38 crc 
kubenswrapper[4784]: I1205 12:25:38.446455 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 12:25:38 crc kubenswrapper[4784]: I1205 12:25:38.464003 4784 trace.go:236] Trace[646139681]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 12:25:24.532) (total time: 13931ms): Dec 05 12:25:38 crc kubenswrapper[4784]: Trace[646139681]: ---"Objects listed" error: 13930ms (12:25:38.463) Dec 05 12:25:38 crc kubenswrapper[4784]: Trace[646139681]: [13.931205945s] [13.931205945s] END Dec 05 12:25:38 crc kubenswrapper[4784]: I1205 12:25:38.464046 4784 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 05 12:25:38 crc kubenswrapper[4784]: I1205 12:25:38.466743 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 12:25:38 crc kubenswrapper[4784]: I1205 12:25:38.862439 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:25:38 crc kubenswrapper[4784]: I1205 12:25:38.863251 4784 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 05 12:25:38 crc kubenswrapper[4784]: I1205 12:25:38.863413 4784 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 05 12:25:38 crc kubenswrapper[4784]: I1205 12:25:38.870167 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:25:38 crc kubenswrapper[4784]: I1205 12:25:38.949351 4784 apiserver.go:52] "Watching apiserver" Dec 05 12:25:38 crc kubenswrapper[4784]: I1205 12:25:38.952173 4784 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 05 12:25:38 crc kubenswrapper[4784]: I1205 12:25:38.952462 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"] Dec 05 12:25:38 crc kubenswrapper[4784]: I1205 12:25:38.952788 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 12:25:38 crc kubenswrapper[4784]: I1205 12:25:38.952836 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:25:38 crc kubenswrapper[4784]: E1205 12:25:38.952932 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:25:38 crc kubenswrapper[4784]: I1205 12:25:38.952950 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:25:38 crc kubenswrapper[4784]: E1205 12:25:38.953006 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:25:38 crc kubenswrapper[4784]: I1205 12:25:38.953029 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 12:25:38 crc kubenswrapper[4784]: I1205 12:25:38.953087 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 12:25:38 crc kubenswrapper[4784]: I1205 12:25:38.953022 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:25:38 crc kubenswrapper[4784]: E1205 12:25:38.953168 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:25:38 crc kubenswrapper[4784]: I1205 12:25:38.956107 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 05 12:25:38 crc kubenswrapper[4784]: I1205 12:25:38.956108 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 05 12:25:38 crc kubenswrapper[4784]: I1205 12:25:38.956678 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 05 12:25:38 crc kubenswrapper[4784]: I1205 12:25:38.956737 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 05 12:25:38 crc kubenswrapper[4784]: I1205 12:25:38.957033 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 05 12:25:38 crc kubenswrapper[4784]: I1205 12:25:38.957048 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 05 12:25:38 crc kubenswrapper[4784]: I1205 12:25:38.957096 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 05 12:25:38 crc kubenswrapper[4784]: I1205 12:25:38.957116 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 05 12:25:38 crc kubenswrapper[4784]: I1205 12:25:38.957209 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 05 12:25:38 crc kubenswrapper[4784]: I1205 12:25:38.982164 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 12:25:38 crc kubenswrapper[4784]: I1205 12:25:38.996008 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resourc
e-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.008931 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.022064 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.032998 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\
"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d237806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.044393 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.049997 4784 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.056648 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.066150 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125068 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125098 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125115 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125133 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125148 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125168 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125203 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125219 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125233 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125249 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125262 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125277 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125291 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125307 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125323 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125337 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125379 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125397 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125410 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125441 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125454 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125469 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125484 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125499 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125512 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125526 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125542 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125556 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125582 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125597 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125612 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125626 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125650 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125679 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125695 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125711 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125727 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125745 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125760 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125775 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125789 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125804 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125818 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125831 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125845 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125863 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125878 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125892 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125907 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125921 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125936 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125950 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125964 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125982 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126001 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126016 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126030 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126045 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126059 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126073 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126088 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126104 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126119 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126134 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126150 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126164 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126179 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126206 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126224 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126241 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126257 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126273 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126289 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126305 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126320 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126335 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126350 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126398 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126412 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126428 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126443 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126457 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126471 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126486 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126501 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126518 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126533 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126548 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126564 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126579 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126596 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126612 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126628 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126644 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126660 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") "
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126677 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 05 12:25:39 crc
kubenswrapper[4784]: I1205 12:25:39.126694 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126711 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126735 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126751 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126788 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126806 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126824 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126840 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126856 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126953 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126972 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126989 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127005 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127022 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127038 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127091 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127106 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127125 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127140 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127157 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: 
\"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127173 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.125519 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127263 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127305 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127318 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127376 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127401 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127422 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127441 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: 
\"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127461 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127480 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127466 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127496 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127515 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127531 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127548 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127569 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127587 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127604 4784 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127622 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127641 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127660 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127679 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127701 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127717 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127735 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127754 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127784 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127810 4784 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127831 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127853 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127870 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127888 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127907 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127926 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127943 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127968 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127999 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 05 12:25:39 crc 
kubenswrapper[4784]: I1205 12:25:39.128025 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128049 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128077 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128095 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128116 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128133 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128152 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128169 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128208 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128236 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod 
\"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128265 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128292 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128328 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128352 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128376 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128395 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128414 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128435 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128452 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128472 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: 
\"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128500 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128517 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128533 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128557 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128573 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128590 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128609 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128631 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128647 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128664 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: 
\"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128685 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128703 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128720 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128737 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128755 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128771 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128790 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128807 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128823 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128842 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: 
\"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128861 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128889 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128927 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128952 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.129000 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.129026 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.129047 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.129070 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.129094 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: 
\"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.129113 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.129135 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.129459 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.129483 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.129503 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.129575 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.129598 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.129617 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " 
pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.129634 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.129687 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.129700 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.131827 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127266 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.143531 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.143955 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.145976 4784 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory"
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.146911 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127564 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127640 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126691 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126993 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.127014 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128033 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128060 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128097 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128179 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128239 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128415 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128419 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128548 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128636 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128733 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128781 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128848 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128917 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.128923 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.129251 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.129243 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.149623 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.129383 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.130032 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.130171 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.130514 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.130517 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.130528 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.130526 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.130657 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.130700 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.130751 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.130775 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.130789 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.130956 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.130976 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.131050 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.131090 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.131119 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.131328 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.131496 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.131502 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.131605 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.131616 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.131629 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.131666 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.131880 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.126037 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.132252 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.132582 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.132803 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.132915 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.132964 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.133087 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.133286 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.133394 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.133433 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.133599 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.133620 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.133713 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.133806 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.133872 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.133906 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.134008 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.134122 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.134346 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.134541 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.134559 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.134774 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: E1205 12:25:39.134780 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:25:39.6347575 +0000 UTC m=+19.054824375 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.134855 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.149864 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.135078 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.135136 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.135155 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.135199 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.150224 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.135260 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.135273 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.136406 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.136635 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.136775 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.136964 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.136973 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.136994 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.137127 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.137096 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.137197 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.134953 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.137363 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.137620 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.137641 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.137673 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.137878 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.137943 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.137980 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.138056 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.138075 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.138217 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.138282 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.138458 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.138556 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.138688 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.138787 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.138851 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.138923 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.138939 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.138955 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.139521 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.140109 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.140175 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.140379 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.140644 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.141017 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.136863 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.141356 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.141363 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.141575 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.141798 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.141868 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: E1205 12:25:39.142571 4784 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.141176 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.143947 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.144103 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: E1205 12:25:39.144155 4784 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.144394 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.144397 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.144562 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.144822 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.144839 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.144890 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.145204 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.145505 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.146775 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.146832 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.146870 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.146875 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.146887 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.147107 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.147104 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.147552 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.147565 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.147683 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.147862 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.147910 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.148225 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.148249 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.148411 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.148525 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.148707 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.148857 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.149245 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j".
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.149301 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.149338 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.149404 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.149731 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.149797 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.149812 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.134869 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.150533 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). 
InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.150519 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.150551 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.150837 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.150908 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.151125 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:25:39 crc kubenswrapper[4784]: E1205 12:25:39.151518 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 12:25:39.651492345 +0000 UTC m=+19.071559160 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 12:25:39 crc kubenswrapper[4784]: E1205 12:25:39.151711 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 12:25:39.651701401 +0000 UTC m=+19.071768226 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.151851 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.151980 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.159097 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.159395 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.159939 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 12:25:39 crc kubenswrapper[4784]: E1205 12:25:39.160295 4784 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 12:25:39 crc kubenswrapper[4784]: E1205 12:25:39.160325 4784 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 12:25:39 crc kubenswrapper[4784]: E1205 12:25:39.160341 4784 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:25:39 crc kubenswrapper[4784]: E1205 12:25:39.160424 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 12:25:39.660400564 +0000 UTC m=+19.080467379 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:25:39 crc kubenswrapper[4784]: E1205 12:25:39.161079 4784 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 12:25:39 crc kubenswrapper[4784]: E1205 12:25:39.161096 4784 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 12:25:39 crc kubenswrapper[4784]: E1205 12:25:39.161109 4784 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:25:39 crc kubenswrapper[4784]: E1205 12:25:39.161176 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 12:25:39.661160788 +0000 UTC m=+19.081227603 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.161876 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.162140 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.162453 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.163996 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f"} Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.164055 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.164342 4784 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f" exitCode=255 Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.164349 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.164527 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.167741 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.167632 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.168003 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.168000 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.168100 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.168666 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.168905 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.169516 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.169713 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:25:39 crc kubenswrapper[4784]: E1205 12:25:39.170923 4784 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 12:25:39 crc kubenswrapper[4784]: E1205 12:25:39.171011 4784 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-apiserver-crc\" already exists" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.171309 4784 scope.go:117] "RemoveContainer" containerID="f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.173739 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.176254 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\
\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d237806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.177135 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.177339 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.178054 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.178322 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.180343 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.180527 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.180528 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.180556 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.180870 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.185067 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.188322 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.197553 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.198544 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.204176 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.207669 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.211691 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.217877 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.226516 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230359 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230393 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230449 4784 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230459 4784 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230468 4784 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230477 4784 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230485 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230495 4784 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230502 4784 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 
12:25:39.230511 4784 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230519 4784 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230526 4784 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230535 4784 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230543 4784 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230551 4784 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230558 4784 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230567 4784 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230574 4784 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230582 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230590 4784 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230598 4784 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230606 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230613 4784 reconciler_common.go:293] "Volume detached for 
volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230620 4784 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230629 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230637 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230646 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230655 4784 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230663 4784 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230671 4784 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230679 4784 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230687 4784 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230697 4784 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230706 4784 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230684 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " 
pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230715 4784 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230775 4784 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230787 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230798 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230807 4784 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230816 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230825 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230833 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230844 4784 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230853 4784 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230862 4784 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230871 4784 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230879 4784 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: 
\"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230887 4784 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230896 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230904 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230913 4784 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230922 4784 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230932 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230941 4784 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230949 4784 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230960 4784 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230969 4784 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230977 4784 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230985 4784 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230995 4784 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231003 4784 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231011 4784 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231020 4784 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231028 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231036 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231045 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231053 4784 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231062 4784 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231071 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231079 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231088 4784 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231098 4784 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231106 4784 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231115 4784 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231123 4784 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231132 4784 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231142 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231151 4784 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231159 4784 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231167 4784 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231176 4784 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231199 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231211 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231222 4784 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231233 4784 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231245 4784 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231257 4784 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231268 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231278 4784 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231288 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231299 4784 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231307 4784 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231316 4784 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231324 4784 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231335 4784 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231343 4784 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231353 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231362 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231370 4784 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231379 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231388 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231396 4784 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231404 4784 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231412 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231420 4784 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231428 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231436 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231443 4784 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231451 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231460 4784 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231468 4784 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231475 4784 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" 
(UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231483 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231491 4784 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231501 4784 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231509 4784 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231518 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231525 4784 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231533 4784 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231541 4784 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231550 4784 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231559 4784 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231567 4784 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231574 4784 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231582 4784 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node 
\"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231589 4784 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231598 4784 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231606 4784 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231615 4784 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231623 4784 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231631 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231640 4784 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231648 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231657 4784 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231668 4784 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231677 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231684 4784 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231691 4784 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231699 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231708 4784 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231718 4784 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231726 4784 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231735 4784 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231743 4784 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231751 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231759 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231767 4784 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231775 4784 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231782 4784 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231790 4784 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231798 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" 
(UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231805 4784 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231813 4784 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231821 4784 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231829 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231838 4784 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231847 4784 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231855 4784 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231863 4784 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231872 4784 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231879 4784 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231887 4784 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231895 4784 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231903 4784 reconciler_common.go:293] "Volume detached for volume 
\"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231912 4784 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231920 4784 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231927 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231936 4784 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231945 4784 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231953 4784 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231961 4784 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231969 4784 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231977 4784 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.231984 4784 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.232008 4784 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.232016 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.232024 4784 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: 
\"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.232032 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.232040 4784 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.232048 4784 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.232056 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.232063 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.232071 4784 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.232079 4784 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.232088 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.232097 4784 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.232107 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.232118 4784 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.232129 4784 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.232141 4784 reconciler_common.go:293] "Volume 
detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.232151 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.232160 4784 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.232171 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.232196 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.230632 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.236349 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05
T12:25:38Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 12:25:38.416897 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 12:25:38.417058 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:25:38.422141 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1079216912/tls.crt::/tmp/serving-cert-1079216912/tls.key\\\\\\\"\\\\nI1205 12:25:38.771082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:25:38.774729 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:25:38.774756 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:25:38.774788 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:25:38.774811 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:25:38.780280 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:25:38.780316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780321 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:25:38.780329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:25:38.780332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:25:38.780335 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:25:38.780478 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:25:38.783020 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.246326 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.268642 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.278820 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.282163 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.638497 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:25:39 crc kubenswrapper[4784]: E1205 12:25:39.638732 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:25:40.638699045 +0000 UTC m=+20.058765860 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.740013 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.740074 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.740107 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.740135 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:25:39 crc kubenswrapper[4784]: E1205 12:25:39.740108 4784 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 12:25:39 crc kubenswrapper[4784]: E1205 12:25:39.740229 4784 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 12:25:39 crc kubenswrapper[4784]: E1205 12:25:39.740487 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 12:25:40.74044146 +0000 UTC m=+20.160508275 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 12:25:39 crc kubenswrapper[4784]: E1205 12:25:39.740550 4784 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 12:25:39 crc kubenswrapper[4784]: E1205 12:25:39.740244 4784 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 12:25:39 crc kubenswrapper[4784]: E1205 12:25:39.740580 4784 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:25:39 crc kubenswrapper[4784]: E1205 12:25:39.740259 4784 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 12:25:39 crc kubenswrapper[4784]: E1205 12:25:39.740660 4784 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 12:25:39 crc kubenswrapper[4784]: E1205 12:25:39.740675 4784 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:25:39 crc kubenswrapper[4784]: E1205 12:25:39.740663 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 12:25:40.740638795 +0000 UTC m=+20.160705790 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 12:25:39 crc kubenswrapper[4784]: E1205 12:25:39.740703 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 12:25:40.740692777 +0000 UTC m=+20.160759812 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:25:39 crc kubenswrapper[4784]: E1205 12:25:39.740724 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 12:25:40.740716268 +0000 UTC m=+20.160783323 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:25:39 crc kubenswrapper[4784]: I1205 12:25:39.998302 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:25:39 crc kubenswrapper[4784]: E1205 12:25:39.998462 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:25:40 crc kubenswrapper[4784]: I1205 12:25:40.172244 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"ba1e93b1bb1f04a0c36b81190dc5ba99e09cff796f519b55340c78d2d6305a0c"} Dec 05 12:25:40 crc kubenswrapper[4784]: I1205 12:25:40.172799 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"90bcfe2f10bd5b9e278a0116b542d723e12f2c17474dc3d356ce0199cd2564cb"} Dec 05 12:25:40 crc kubenswrapper[4784]: I1205 12:25:40.174619 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"7bf86c4db02b1091f887fc7dc73c7e2e7bb8d03e7212fb8bd443a43c4f51e976"} Dec 05 12:25:40 crc kubenswrapper[4784]: I1205 12:25:40.174680 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"87cb87114139d85aeacf41902c2c61f7ccb3cef1f22892820a521390c2a64d04"} Dec 05 12:25:40 crc kubenswrapper[4784]: I1205 12:25:40.174692 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" 
event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"dc4716540615370630636d54671b4febf017a1159fc5d98ad01bc4760ac6d656"} Dec 05 12:25:40 crc kubenswrapper[4784]: I1205 12:25:40.176166 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"f662e5375e2488b0dee348ddea290d015361982e81f7b85207fd17e7e5ddbab4"} Dec 05 12:25:40 crc kubenswrapper[4784]: I1205 12:25:40.178626 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 05 12:25:40 crc kubenswrapper[4784]: I1205 12:25:40.186231 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e"} Dec 05 12:25:40 crc kubenswrapper[4784]: I1205 12:25:40.186287 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:25:40 crc kubenswrapper[4784]: I1205 12:25:40.216937 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05
T12:25:38Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 12:25:38.416897 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 12:25:38.417058 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:25:38.422141 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1079216912/tls.crt::/tmp/serving-cert-1079216912/tls.key\\\\\\\"\\\\nI1205 12:25:38.771082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:25:38.774729 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:25:38.774756 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:25:38.774788 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:25:38.774811 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:25:38.780280 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:25:38.780316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780321 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:25:38.780329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:25:38.780332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:25:38.780335 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:25:38.780478 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:25:38.783020 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:40Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:40 crc kubenswrapper[4784]: I1205 12:25:40.235868 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:40Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:40 crc kubenswrapper[4784]: I1205 12:25:40.253371 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:40Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:40 crc kubenswrapper[4784]: I1205 12:25:40.267791 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be 
located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:40Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:40 crc kubenswrapper[4784]: I1205 12:25:40.285917 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/stati
c-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d237806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:40Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:40 crc kubenswrapper[4784]: I1205 12:25:40.307413 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba1e93b1bb1f04a0c36b81190dc5ba99e09cff796f519b55340c78d2d6305a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:40Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:40 crc kubenswrapper[4784]: I1205 12:25:40.321328 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:40Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:40 crc kubenswrapper[4784]: I1205 12:25:40.336729 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:40Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:40 crc kubenswrapper[4784]: I1205 12:25:40.360433 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba1e93b1bb1f04a0c36b81190dc5ba99e09cff796f519b55340c78d2d6305a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:40Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:40 crc kubenswrapper[4784]: I1205 12:25:40.373654 4784 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:40Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:40 crc kubenswrapper[4784]: I1205 12:25:40.386141 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:40Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:40 crc kubenswrapper[4784]: I1205 12:25:40.399128 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf86c4db02b1091f887fc7dc73c7e2e7bb8d03e7212fb8bd443a43c4f51e976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87cb87114139d85aeacf41902c2c61f7ccb3cef1f22892820a521390c2a64d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:40Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:40 crc kubenswrapper[4784]: I1205 12:25:40.412074 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:40Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:40 crc kubenswrapper[4784]: I1205 12:25:40.425076 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d237806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:40Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:40 crc kubenswrapper[4784]: I1205 12:25:40.439805 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 12:25:38.416897 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 12:25:38.417058 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:25:38.422141 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1079216912/tls.crt::/tmp/serving-cert-1079216912/tls.key\\\\\\\"\\\\nI1205 12:25:38.771082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:25:38.774729 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:25:38.774756 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:25:38.774788 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:25:38.774811 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:25:38.780280 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:25:38.780316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780321 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:25:38.780329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:25:38.780332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:25:38.780335 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:25:38.780478 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:25:38.783020 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:40Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:40 crc kubenswrapper[4784]: I1205 12:25:40.452971 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:40Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:40 crc kubenswrapper[4784]: I1205 12:25:40.648670 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:25:40 crc kubenswrapper[4784]: E1205 12:25:40.648837 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:25:42.648819106 +0000 UTC m=+22.068885921 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:25:40 crc kubenswrapper[4784]: I1205 12:25:40.750097 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:25:40 crc kubenswrapper[4784]: I1205 12:25:40.750153 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:25:40 crc kubenswrapper[4784]: I1205 12:25:40.750201 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:25:40 crc kubenswrapper[4784]: I1205 12:25:40.750230 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:25:40 crc kubenswrapper[4784]: E1205 12:25:40.750313 4784 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 12:25:40 crc kubenswrapper[4784]: E1205 12:25:40.750382 4784 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 12:25:40 crc kubenswrapper[4784]: E1205 12:25:40.750406 4784 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 12:25:40 crc kubenswrapper[4784]: E1205 12:25:40.750414 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 12:25:42.750390645 +0000 UTC m=+22.170457530 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 12:25:40 crc kubenswrapper[4784]: E1205 12:25:40.750420 4784 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:25:40 crc kubenswrapper[4784]: E1205 12:25:40.750412 4784 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 12:25:40 crc kubenswrapper[4784]: E1205 12:25:40.750450 4784 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 12:25:40 crc kubenswrapper[4784]: E1205 12:25:40.750518 4784 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 12:25:40 crc kubenswrapper[4784]: E1205 12:25:40.750535 4784 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:25:40 crc kubenswrapper[4784]: E1205 12:25:40.750481 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 12:25:42.750463667 +0000 UTC m=+22.170530562 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:25:40 crc kubenswrapper[4784]: E1205 12:25:40.750630 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 12:25:42.750602032 +0000 UTC m=+22.170668917 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 12:25:40 crc kubenswrapper[4784]: E1205 12:25:40.750651 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 12:25:42.750643983 +0000 UTC m=+22.170710798 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:25:40 crc kubenswrapper[4784]: I1205 12:25:40.998866 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:25:40 crc kubenswrapper[4784]: I1205 12:25:40.998874 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:25:40 crc kubenswrapper[4784]: E1205 12:25:40.999044 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:25:40 crc kubenswrapper[4784]: E1205 12:25:40.999148 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.002473 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.003339 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.004059 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.004657 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.006309 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.006929 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.007619 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.008585 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.009229 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.010421 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.011033 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.012336 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.012872 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.014143 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" 
path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.014576 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-synce
r\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 12:25:38.416897 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 12:25:38.417058 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:25:38.422141 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1079216912/tls.crt::/tmp/serving-cert-1079216912/tls.key\\\\\\\"\\\\nI1205 12:25:38.771082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:25:38.774729 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:25:38.774756 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:25:38.774788 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:25:38.774811 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:25:38.780280 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:25:38.780316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780321 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:25:38.780329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:25:38.780332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:25:38.780335 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:25:38.780478 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:25:38.783020 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.014725 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.015745 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.016472 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.016986 4784 kubelet_volumes.go:163] "Cleaned up 
orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.018316 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.019109 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.019698 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.020836 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.021377 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.022633 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.023112 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.024474 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.025294 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.025702 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.025858 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.027024 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.027785 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.028785 4784 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.028899 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.030847 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.031729 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.032137 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.033857 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.035944 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.036698 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.037413 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.039042 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.039413 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.039621 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.040786 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.041920 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.042706 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.043692 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.044388 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.045429 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.046300 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.047109 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.047562 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" 
path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.048068 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.048968 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.049536 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.050633 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.052414 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf86c4db02b1091f887fc7dc73c7e2e7bb8d03e7212fb8bd443a43c4f51e976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87cb87114139d85aeacf41902c2c61f7ccb3cef1f22892820a521390c2a64d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\
\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.065707 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.081120 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\
\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d237806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.097758 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba1e93b1bb1f04a0c36b81190dc5ba99e09cff796f519b55340c78d2d6305a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.110105 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.613247 4784 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.615375 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.615450 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.615484 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.615550 4784 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.622375 4784 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.622692 4784 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.623882 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.623932 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.623943 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.623959 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.623971 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:41Z","lastTransitionTime":"2025-12-05T12:25:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:41 crc kubenswrapper[4784]: E1205 12:25:41.643462 4784 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:25:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:25:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:25:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:25:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"08035136-431a-41d0-879c-bf86d5af7e54\\\",\\\"systemUUID\\\":\\\"aac4a951-c40f-4b5f-a660-6c137757957c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.648245 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.648291 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.648302 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.648346 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.648356 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:41Z","lastTransitionTime":"2025-12-05T12:25:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:41 crc kubenswrapper[4784]: E1205 12:25:41.659995 4784 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:25:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:25:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:25:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:25:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"08035136-431a-41d0-879c-bf86d5af7e54\\\",\\\"systemUUID\\\":\\\"aac4a951-c40f-4b5f-a660-6c137757957c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.663917 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.663983 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.663995 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.664014 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.664040 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:41Z","lastTransitionTime":"2025-12-05T12:25:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:41 crc kubenswrapper[4784]: E1205 12:25:41.677748 4784 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:25:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:25:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:25:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:25:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"08035136-431a-41d0-879c-bf86d5af7e54\\\",\\\"systemUUID\\\":\\\"aac4a951-c40f-4b5f-a660-6c137757957c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.681833 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.681887 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.681899 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.681918 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.681932 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:41Z","lastTransitionTime":"2025-12-05T12:25:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:41 crc kubenswrapper[4784]: E1205 12:25:41.695240 4784 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:25:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:25:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:25:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:25:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"08035136-431a-41d0-879c-bf86d5af7e54\\\",\\\"systemUUID\\\":\\\"aac4a951-c40f-4b5f-a660-6c137757957c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.698806 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.698835 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.698846 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.698862 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.698873 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:41Z","lastTransitionTime":"2025-12-05T12:25:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:41 crc kubenswrapper[4784]: E1205 12:25:41.711551 4784 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:25:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:25:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:25:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:25:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"08035136-431a-41d0-879c-bf86d5af7e54\\\",\\\"systemUUID\\\":\\\"aac4a951-c40f-4b5f-a660-6c137757957c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:41 crc kubenswrapper[4784]: E1205 12:25:41.711762 4784 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.713495 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
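Every status patch in this stretch is rejected for the same root cause: the serving certificate of the node.network-node-identity.openshift.io webhook expired on 2025-08-24T17:21:41Z, while the node clock reads 2025-12-05, so the TLS handshake fails its x509 validity-window check before any request is served. A minimal standalone Go sketch of that check follows; it is a hypothetical helper, not kubelet source code, and the certificate path only mirrors the webhook-cert mount visible later in this log (the tls.crt file name is an assumption):

// certcheck.go — sketch of the NotBefore/NotAfter comparison behind
// "x509: certificate has expired or is not yet valid".
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	// Illustrative path; matches the webhook pod's /etc/webhook-cert/ mount seen below.
	pemBytes, err := os.ReadFile("/etc/webhook-cert/tls.crt")
	if err != nil {
		fmt.Fprintln(os.Stderr, "read cert:", err)
		os.Exit(1)
	}
	block, _ := pem.Decode(pemBytes)
	if block == nil {
		fmt.Fprintln(os.Stderr, "no PEM block found")
		os.Exit(1)
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		fmt.Fprintln(os.Stderr, "parse cert:", err)
		os.Exit(1)
	}
	now := time.Now().UTC()
	// The same comparison the TLS client makes before trusting the chain.
	if now.Before(cert.NotBefore) || now.After(cert.NotAfter) {
		fmt.Printf("INVALID: current time %s is outside [%s, %s]\n",
			now.Format(time.RFC3339),
			cert.NotBefore.UTC().Format(time.RFC3339),
			cert.NotAfter.UTC().Format(time.RFC3339))
		os.Exit(2)
	}
	fmt.Println("certificate is within its validity window")
}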
event="NodeHasSufficientMemory" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.713535 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.713547 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.713564 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.713576 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:41Z","lastTransitionTime":"2025-12-05T12:25:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.816078 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.816136 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.816150 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.816165 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.816174 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:41Z","lastTransitionTime":"2025-12-05T12:25:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.918650 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.918701 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.918712 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.918735 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.918745 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:41Z","lastTransitionTime":"2025-12-05T12:25:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.997892 4784 util.go:30] "No sandbox for pod can be found. 
Dec 05 12:25:41 crc kubenswrapper[4784]: I1205 12:25:41.997892 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 12:25:41 crc kubenswrapper[4784]: E1205 12:25:41.998098 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.021730 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.021773 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.021782 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.021800 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.021810 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:42Z","lastTransitionTime":"2025-12-05T12:25:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.124661 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.124702 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.124718 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.124736 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.124749 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:42Z","lastTransitionTime":"2025-12-05T12:25:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.190990 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"a95e925e1cf25326e556267b9b43fcff4b12eafe8aeb03b577a01ed8f0d98ab5"} Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.205930 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:
9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 12:25:38.416897 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 12:25:38.417058 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:25:38.422141 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1079216912/tls.crt::/tmp/serving-cert-1079216912/tls.key\\\\\\\"\\\\nI1205 12:25:38.771082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:25:38.774729 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:25:38.774756 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:25:38.774788 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:25:38.774811 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:25:38.780280 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:25:38.780316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780321 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:25:38.780329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:25:38.780332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:25:38.780335 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:25:38.780478 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:25:38.783020 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.217719 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a95e925e1cf25326e556267b9b43fcff4b12eafe8aeb03b577a01ed8f0d98ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.226946 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.227011 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.227023 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.227036 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.227045 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:42Z","lastTransitionTime":"2025-12-05T12:25:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.231456 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d237806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.242707 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba1e93b1bb1f04a0c36b81190dc5ba99e09cff796f519b55340c78d2d6305a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for 
pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.255214 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.268522 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.282503 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf86c4db02b1091f887fc7dc73c7e2e7bb8d03e7212fb8bd443a43c4f51e976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87cb87114139d85aeacf41902c2c61f7ccb3cef1f22892820a521390c2a64d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.301719 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.329392 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.329438 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.329448 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.329463 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.329474 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:42Z","lastTransitionTime":"2025-12-05T12:25:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.432462 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.432509 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.432522 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.432542 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.432556 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:42Z","lastTransitionTime":"2025-12-05T12:25:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.534820 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.534870 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.534881 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.534898 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.534912 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:42Z","lastTransitionTime":"2025-12-05T12:25:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.637224 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.637288 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.637299 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.637319 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.637332 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:42Z","lastTransitionTime":"2025-12-05T12:25:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.666560 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:25:42 crc kubenswrapper[4784]: E1205 12:25:42.666672 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:25:46.666649991 +0000 UTC m=+26.086716816 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.739829 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.740293 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.740305 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.740324 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.740445 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:42Z","lastTransitionTime":"2025-12-05T12:25:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.767571 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.767641 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.767666 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.767695 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:25:42 crc kubenswrapper[4784]: E1205 12:25:42.767787 4784 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object 
"openshift-network-console"/"networking-console-plugin" not registered Dec 05 12:25:42 crc kubenswrapper[4784]: E1205 12:25:42.767861 4784 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 12:25:42 crc kubenswrapper[4784]: E1205 12:25:42.767912 4784 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 12:25:42 crc kubenswrapper[4784]: E1205 12:25:42.767928 4784 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:25:42 crc kubenswrapper[4784]: E1205 12:25:42.767950 4784 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 12:25:42 crc kubenswrapper[4784]: E1205 12:25:42.767878 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 12:25:46.767849307 +0000 UTC m=+26.187916122 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 12:25:42 crc kubenswrapper[4784]: E1205 12:25:42.767996 4784 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 12:25:42 crc kubenswrapper[4784]: E1205 12:25:42.768010 4784 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:25:42 crc kubenswrapper[4784]: E1205 12:25:42.767872 4784 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 12:25:42 crc kubenswrapper[4784]: E1205 12:25:42.768041 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 12:25:46.768007912 +0000 UTC m=+26.188074917 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:25:42 crc kubenswrapper[4784]: E1205 12:25:42.768079 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 12:25:46.768060694 +0000 UTC m=+26.188127509 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:25:42 crc kubenswrapper[4784]: E1205 12:25:42.768100 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 12:25:46.768091845 +0000 UTC m=+26.188158660 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.842574 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.842617 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.842626 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.842647 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.842663 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:42Z","lastTransitionTime":"2025-12-05T12:25:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.946698 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.946790 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.946806 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.946856 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.946880 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:42Z","lastTransitionTime":"2025-12-05T12:25:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.998602 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:25:42 crc kubenswrapper[4784]: I1205 12:25:42.998641 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:25:42 crc kubenswrapper[4784]: E1205 12:25:42.999087 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:25:42 crc kubenswrapper[4784]: E1205 12:25:42.999361 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.049444 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.049495 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.049525 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.049542 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.049553 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:43Z","lastTransitionTime":"2025-12-05T12:25:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.151930 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.151984 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.152001 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.152022 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.152036 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:43Z","lastTransitionTime":"2025-12-05T12:25:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.254661 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.254722 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.254732 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.254751 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.254762 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:43Z","lastTransitionTime":"2025-12-05T12:25:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.357033 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.357078 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.357087 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.357108 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.357118 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:43Z","lastTransitionTime":"2025-12-05T12:25:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.459643 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.459685 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.459697 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.459716 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.459729 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:43Z","lastTransitionTime":"2025-12-05T12:25:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.562495 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.562568 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.562582 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.562600 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.562613 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:43Z","lastTransitionTime":"2025-12-05T12:25:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.664943 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.664993 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.665004 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.665022 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.665035 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:43Z","lastTransitionTime":"2025-12-05T12:25:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.767487 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.767531 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.767558 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.767573 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.767583 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:43Z","lastTransitionTime":"2025-12-05T12:25:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.870140 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.870177 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.870198 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.870211 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.870220 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:43Z","lastTransitionTime":"2025-12-05T12:25:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.973388 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.973440 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.973456 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.973477 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.973491 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:43Z","lastTransitionTime":"2025-12-05T12:25:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.978860 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-xpw77"] Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.979214 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-g5gv5"] Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.979398 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-xpw77" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.979410 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-g5gv5" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.981959 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.981982 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.982270 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.982442 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.982539 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.982784 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.982816 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.982828 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.997159 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 12:25:38.416897 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 12:25:38.417058 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:25:38.422141 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1079216912/tls.crt::/tmp/serving-cert-1079216912/tls.key\\\\\\\"\\\\nI1205 12:25:38.771082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:25:38.774729 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:25:38.774756 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:25:38.774788 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:25:38.774811 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:25:38.780280 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:25:38.780316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780321 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:25:38.780329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:25:38.780332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:25:38.780335 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:25:38.780478 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:25:38.783020 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:43Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:43 crc kubenswrapper[4784]: I1205 12:25:43.998221 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:25:43 crc kubenswrapper[4784]: E1205 12:25:43.998317 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.009641 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a95e925e1cf25326e556267b9b43fcff4b12eafe8aeb03b577a01ed8f0d98ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.020662 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xpw77" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"beba5d67-ad2e-4968-91da-3f451dd2cdc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjrpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xpw77\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.032806 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d237806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:44Z is after 2025-08-24T17:21:41Z"
Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.045832 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba1e93b1bb1f04a0c36b81190dc5ba99e09cff796f519b55340c78d2d6305a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:44Z is after 2025-08-24T17:21:41Z"
Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.057627 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:44Z is after 2025-08-24T17:21:41Z"
Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.072846 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:44Z is after 2025-08-24T17:21:41Z"
Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.075409 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.075467 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.075481 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.075500 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.075513 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:44Z","lastTransitionTime":"2025-12-05T12:25:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.080780 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/759cb09f-42c3-4254-82f8-b5285b61012a-os-release\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5"
Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.080816 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/759cb09f-42c3-4254-82f8-b5285b61012a-host-var-lib-cni-bin\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5"
Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.080834 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/759cb09f-42c3-4254-82f8-b5285b61012a-host-var-lib-kubelet\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5"
Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.080850 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/759cb09f-42c3-4254-82f8-b5285b61012a-host-run-multus-certs\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5"
Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.080867 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/759cb09f-42c3-4254-82f8-b5285b61012a-etc-kubernetes\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5"
Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.080884 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjrpt\" (UniqueName: \"kubernetes.io/projected/beba5d67-ad2e-4968-91da-3f451dd2cdc9-kube-api-access-rjrpt\") pod \"node-resolver-xpw77\" (UID: \"beba5d67-ad2e-4968-91da-3f451dd2cdc9\") " pod="openshift-dns/node-resolver-xpw77"
Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.080900 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/759cb09f-42c3-4254-82f8-b5285b61012a-host-run-netns\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5"
Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.080915 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/759cb09f-42c3-4254-82f8-b5285b61012a-host-var-lib-cni-multus\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5"
Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.080933 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/759cb09f-42c3-4254-82f8-b5285b61012a-hostroot\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5"
Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.080947 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/759cb09f-42c3-4254-82f8-b5285b61012a-multus-cni-dir\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5"
Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.080962 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/759cb09f-42c3-4254-82f8-b5285b61012a-cnibin\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5"
Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.081052 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/759cb09f-42c3-4254-82f8-b5285b61012a-host-run-k8s-cni-cncf-io\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5"
Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.081225 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/759cb09f-42c3-4254-82f8-b5285b61012a-multus-daemon-config\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5"
Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.081313 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/beba5d67-ad2e-4968-91da-3f451dd2cdc9-hosts-file\") pod \"node-resolver-xpw77\" (UID: \"beba5d67-ad2e-4968-91da-3f451dd2cdc9\") " pod="openshift-dns/node-resolver-xpw77"
Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.081336 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/759cb09f-42c3-4254-82f8-b5285b61012a-system-cni-dir\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5"
Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.081373 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/759cb09f-42c3-4254-82f8-b5285b61012a-cni-binary-copy\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5"
Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.081416 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/759cb09f-42c3-4254-82f8-b5285b61012a-multus-socket-dir-parent\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5"
Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.081458 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/759cb09f-42c3-4254-82f8-b5285b61012a-multus-conf-dir\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5"
Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.081486 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rpk8f\" (UniqueName: \"kubernetes.io/projected/759cb09f-42c3-4254-82f8-b5285b61012a-kube-api-access-rpk8f\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5"
Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.091495 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf86c4db02b1091f887fc7dc73c7e2e7bb8d03e7212fb8bd443a43c4f51e976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87cb87114139d85aeacf41902c2c61f7ccb3cef1f22892820a521390c2a64d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:44Z is after 2025-08-24T17:21:41Z"
Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.110564 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:44Z is after 2025-08-24T17:21:41Z"
Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.126000 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 12:25:38.416897 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 12:25:38.417058 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:25:38.422141 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1079216912/tls.crt::/tmp/serving-cert-1079216912/tls.key\\\\\\\"\\\\nI1205 12:25:38.771082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:25:38.774729 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:25:38.774756 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:25:38.774788 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:25:38.774811 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:25:38.780280 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:25:38.780316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780321 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:25:38.780329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:25:38.780332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:25:38.780335 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:25:38.780478 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:25:38.783020 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:44Z is after 2025-08-24T17:21:41Z"
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a95e925e1cf25326e556267b9b43fcff4b12eafe8aeb03b577a01ed8f0d98ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.149266 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xpw77" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"beba5d67-ad2e-4968-91da-3f451dd2cdc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjrpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xpw77\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.168338 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba1e93b1bb1f04a0c36b81190dc5ba99e09cff796f519b55340c78d2d6305a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.177965 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.178020 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.178033 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.178054 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.178067 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:44Z","lastTransitionTime":"2025-12-05T12:25:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.182342 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/beba5d67-ad2e-4968-91da-3f451dd2cdc9-hosts-file\") pod \"node-resolver-xpw77\" (UID: \"beba5d67-ad2e-4968-91da-3f451dd2cdc9\") " pod="openshift-dns/node-resolver-xpw77" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.182433 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/759cb09f-42c3-4254-82f8-b5285b61012a-system-cni-dir\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.182458 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/759cb09f-42c3-4254-82f8-b5285b61012a-cni-binary-copy\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.182500 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/759cb09f-42c3-4254-82f8-b5285b61012a-multus-socket-dir-parent\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.182493 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/beba5d67-ad2e-4968-91da-3f451dd2cdc9-hosts-file\") pod \"node-resolver-xpw77\" (UID: \"beba5d67-ad2e-4968-91da-3f451dd2cdc9\") " pod="openshift-dns/node-resolver-xpw77" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.182525 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/759cb09f-42c3-4254-82f8-b5285b61012a-multus-conf-dir\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " 
pod="openshift-multus/multus-g5gv5" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.182574 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rpk8f\" (UniqueName: \"kubernetes.io/projected/759cb09f-42c3-4254-82f8-b5285b61012a-kube-api-access-rpk8f\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.182581 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/759cb09f-42c3-4254-82f8-b5285b61012a-multus-conf-dir\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.182586 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/759cb09f-42c3-4254-82f8-b5285b61012a-multus-socket-dir-parent\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.182615 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/759cb09f-42c3-4254-82f8-b5285b61012a-os-release\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.182676 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/759cb09f-42c3-4254-82f8-b5285b61012a-host-var-lib-cni-bin\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.182699 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/759cb09f-42c3-4254-82f8-b5285b61012a-os-release\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.182762 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/759cb09f-42c3-4254-82f8-b5285b61012a-host-var-lib-cni-bin\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.182705 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/759cb09f-42c3-4254-82f8-b5285b61012a-host-var-lib-kubelet\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.182817 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/759cb09f-42c3-4254-82f8-b5285b61012a-host-run-multus-certs\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.182833 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: 
\"kubernetes.io/host-path/759cb09f-42c3-4254-82f8-b5285b61012a-host-var-lib-kubelet\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.182844 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/759cb09f-42c3-4254-82f8-b5285b61012a-etc-kubernetes\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.182870 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/759cb09f-42c3-4254-82f8-b5285b61012a-host-run-multus-certs\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.182888 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjrpt\" (UniqueName: \"kubernetes.io/projected/beba5d67-ad2e-4968-91da-3f451dd2cdc9-kube-api-access-rjrpt\") pod \"node-resolver-xpw77\" (UID: \"beba5d67-ad2e-4968-91da-3f451dd2cdc9\") " pod="openshift-dns/node-resolver-xpw77" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.182911 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/759cb09f-42c3-4254-82f8-b5285b61012a-etc-kubernetes\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.182936 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/759cb09f-42c3-4254-82f8-b5285b61012a-host-run-netns\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.182958 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/759cb09f-42c3-4254-82f8-b5285b61012a-host-var-lib-cni-multus\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.183024 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/759cb09f-42c3-4254-82f8-b5285b61012a-host-run-netns\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.183063 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/759cb09f-42c3-4254-82f8-b5285b61012a-hostroot\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.183085 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/759cb09f-42c3-4254-82f8-b5285b61012a-multus-cni-dir\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.183132 
4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/759cb09f-42c3-4254-82f8-b5285b61012a-host-var-lib-cni-multus\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.183148 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/759cb09f-42c3-4254-82f8-b5285b61012a-cni-binary-copy\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.183166 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/759cb09f-42c3-4254-82f8-b5285b61012a-cnibin\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.183215 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/759cb09f-42c3-4254-82f8-b5285b61012a-host-run-k8s-cni-cncf-io\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.183226 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/759cb09f-42c3-4254-82f8-b5285b61012a-cnibin\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.183251 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/759cb09f-42c3-4254-82f8-b5285b61012a-multus-daemon-config\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.183282 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/759cb09f-42c3-4254-82f8-b5285b61012a-host-run-k8s-cni-cncf-io\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.183320 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/759cb09f-42c3-4254-82f8-b5285b61012a-hostroot\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.183495 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/759cb09f-42c3-4254-82f8-b5285b61012a-multus-cni-dir\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.183649 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/759cb09f-42c3-4254-82f8-b5285b61012a-multus-daemon-config\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " 
pod="openshift-multus/multus-g5gv5" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.183685 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/759cb09f-42c3-4254-82f8-b5285b61012a-system-cni-dir\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.183765 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.203360 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rpk8f\" (UniqueName: \"kubernetes.io/projected/759cb09f-42c3-4254-82f8-b5285b61012a-kube-api-access-rpk8f\") pod \"multus-g5gv5\" (UID: \"759cb09f-42c3-4254-82f8-b5285b61012a\") " pod="openshift-multus/multus-g5gv5" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.210247 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.214911 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjrpt\" (UniqueName: \"kubernetes.io/projected/beba5d67-ad2e-4968-91da-3f451dd2cdc9-kube-api-access-rjrpt\") pod \"node-resolver-xpw77\" (UID: \"beba5d67-ad2e-4968-91da-3f451dd2cdc9\") " pod="openshift-dns/node-resolver-xpw77" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.263927 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf86c4db02b1091f887fc7dc73c7e2e7bb8d03e7212fb8bd443a43c4f51e976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87cb87114139d85aeacf41902c2c61f7ccb3cef1f22892820a521390c2a64d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"im
Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.263927 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf86c4db02b1091f887fc7dc73c7e2e7bb8d03e7212fb8bd443a43c4f51e976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87cb87114139d85aeacf41902c2c61f7ccb3cef1f22892820a521390c2a64d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:44Z is after 2025-08-24T17:21:41Z"
Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.280911 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.280958 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.280969 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.280989 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.281001 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:44Z","lastTransitionTime":"2025-12-05T12:25:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.297477 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-xpw77"
Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.306499 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-g5gv5"
Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.313265 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:44Z is after 2025-08-24T17:21:41Z"
Dec 05 12:25:44 crc kubenswrapper[4784]: W1205 12:25:44.325760 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod759cb09f_42c3_4254_82f8_b5285b61012a.slice/crio-8eba044e790323bb9d0c348180b16627bd5710daf095c0ba87d74f5ee10a7a89 WatchSource:0}: Error finding container 8eba044e790323bb9d0c348180b16627bd5710daf095c0ba87d74f5ee10a7a89: Status 404 returned error can't find the container with id 8eba044e790323bb9d0c348180b16627bd5710daf095c0ba87d74f5ee10a7a89
Dec 05 12:25:44 crc kubenswrapper[4784]: W1205 12:25:44.326065 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbeba5d67_ad2e_4968_91da_3f451dd2cdc9.slice/crio-3ebd878910779b591a467b15275efeac548da0176e4acc53f25be1ed0f3cbcec WatchSource:0}: Error finding container 3ebd878910779b591a467b15275efeac548da0176e4acc53f25be1ed0f3cbcec: Status 404 returned error can't find the container with id 3ebd878910779b591a467b15275efeac548da0176e4acc53f25be1ed0f3cbcec
Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.345790 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g5gv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759cb09f-42c3-4254-82f8-b5285b61012a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rpk8f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"start
Time\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g5gv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.380830 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-sx8lm"] Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.381570 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" Dec 05 12:25:44 crc kubenswrapper[4784]: W1205 12:25:44.383324 4784 reflector.go:561] object-"openshift-machine-config-operator"/"proxy-tls": failed to list *v1.Secret: secrets "proxy-tls" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Dec 05 12:25:44 crc kubenswrapper[4784]: E1205 12:25:44.383488 4784 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"proxy-tls\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"proxy-tls\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 12:25:44 crc kubenswrapper[4784]: W1205 12:25:44.383625 4784 reflector.go:561] object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq": failed to list *v1.Secret: secrets "machine-config-daemon-dockercfg-r5tcq" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Dec 05 12:25:44 crc kubenswrapper[4784]: E1205 12:25:44.383683 4784 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"machine-config-daemon-dockercfg-r5tcq\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"machine-config-daemon-dockercfg-r5tcq\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.384603 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.384721 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.384800 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.384882 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.384974 4784 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:44Z","lastTransitionTime":"2025-12-05T12:25:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.387281 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-fxbpl"] Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.387815 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.388036 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.388492 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.391413 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.393865 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-vpljs"] Dec 05 12:25:44 crc kubenswrapper[4784]: W1205 12:25:44.394124 4784 reflector.go:561] object-"openshift-ovn-kubernetes"/"ovnkube-config": failed to list *v1.ConfigMap: configmaps "ovnkube-config" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-ovn-kubernetes": no relationship found between node 'crc' and this object Dec 05 12:25:44 crc kubenswrapper[4784]: E1205 12:25:44.394201 4784 reflector.go:158] "Unhandled Error" err="object-\"openshift-ovn-kubernetes\"/\"ovnkube-config\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"ovnkube-config\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-ovn-kubernetes\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.394925 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-vpljs" Dec 05 12:25:44 crc kubenswrapper[4784]: W1205 12:25:44.395641 4784 reflector.go:561] object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-ovn-kubernetes": no relationship found between node 'crc' and this object Dec 05 12:25:44 crc kubenswrapper[4784]: E1205 12:25:44.395668 4784 reflector.go:158] "Unhandled Error" err="object-\"openshift-ovn-kubernetes\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-ovn-kubernetes\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 12:25:44 crc kubenswrapper[4784]: W1205 12:25:44.395944 4784 reflector.go:561] object-"openshift-ovn-kubernetes"/"ovnkube-script-lib": failed to list *v1.ConfigMap: configmaps "ovnkube-script-lib" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-ovn-kubernetes": no relationship found between node 'crc' and this object Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.395933 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\
\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d237806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:44 crc kubenswrapper[4784]: E1205 12:25:44.395994 4784 reflector.go:158] "Unhandled Error" err="object-\"openshift-ovn-kubernetes\"/\"ovnkube-script-lib\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"ovnkube-script-lib\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-ovn-kubernetes\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 12:25:44 crc kubenswrapper[4784]: W1205 12:25:44.396049 4784 reflector.go:561] 
object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert": failed to list *v1.Secret: secrets "ovn-node-metrics-cert" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-ovn-kubernetes": no relationship found between node 'crc' and this object Dec 05 12:25:44 crc kubenswrapper[4784]: E1205 12:25:44.396061 4784 reflector.go:158] "Unhandled Error" err="object-\"openshift-ovn-kubernetes\"/\"ovn-node-metrics-cert\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"ovn-node-metrics-cert\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-ovn-kubernetes\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 12:25:44 crc kubenswrapper[4784]: W1205 12:25:44.396077 4784 reflector.go:561] object-"openshift-ovn-kubernetes"/"env-overrides": failed to list *v1.ConfigMap: configmaps "env-overrides" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-ovn-kubernetes": no relationship found between node 'crc' and this object Dec 05 12:25:44 crc kubenswrapper[4784]: W1205 12:25:44.396102 4784 reflector.go:561] object-"openshift-ovn-kubernetes"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-ovn-kubernetes": no relationship found between node 'crc' and this object Dec 05 12:25:44 crc kubenswrapper[4784]: E1205 12:25:44.396114 4784 reflector.go:158] "Unhandled Error" err="object-\"openshift-ovn-kubernetes\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-ovn-kubernetes\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 12:25:44 crc kubenswrapper[4784]: E1205 12:25:44.396126 4784 reflector.go:158] "Unhandled Error" err="object-\"openshift-ovn-kubernetes\"/\"env-overrides\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"env-overrides\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-ovn-kubernetes\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 12:25:44 crc kubenswrapper[4784]: W1205 12:25:44.396459 4784 reflector.go:561] object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl": failed to list *v1.Secret: secrets "ovn-kubernetes-node-dockercfg-pwtwl" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-ovn-kubernetes": no relationship found between node 'crc' and this object Dec 05 12:25:44 crc kubenswrapper[4784]: E1205 12:25:44.396549 4784 reflector.go:158] "Unhandled Error" err="object-\"openshift-ovn-kubernetes\"/\"ovn-kubernetes-node-dockercfg-pwtwl\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"ovn-kubernetes-node-dockercfg-pwtwl\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-ovn-kubernetes\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 12:25:44 crc kubenswrapper[4784]: W1205 12:25:44.401975 4784 reflector.go:561] 
object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz": failed to list *v1.Secret: secrets "multus-ancillary-tools-dockercfg-vnmsz" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Dec 05 12:25:44 crc kubenswrapper[4784]: E1205 12:25:44.402130 4784 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"multus-ancillary-tools-dockercfg-vnmsz\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"multus-ancillary-tools-dockercfg-vnmsz\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 12:25:44 crc kubenswrapper[4784]: W1205 12:25:44.402326 4784 reflector.go:561] object-"openshift-multus"/"default-cni-sysctl-allowlist": failed to list *v1.ConfigMap: configmaps "default-cni-sysctl-allowlist" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Dec 05 12:25:44 crc kubenswrapper[4784]: E1205 12:25:44.402467 4784 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"default-cni-sysctl-allowlist\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"default-cni-sysctl-allowlist\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.485301 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-run-systemd\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.485343 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-etc-openvswitch\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.485410 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-run-ovn\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.485438 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-host-cni-bin\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.485598 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: 
\"kubernetes.io/configmap/291f2a35-7dd5-4af9-87f0-caae4ef75c66-ovnkube-script-lib\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.485615 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-host-slash\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.485630 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-host-run-netns\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.485680 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-host-cni-netd\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.485703 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/be412f31-7a36-4811-8914-be8cdc987d08-mcd-auth-proxy-config\") pod \"machine-config-daemon-sx8lm\" (UID: \"be412f31-7a36-4811-8914-be8cdc987d08\") " pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.485857 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-host-run-ovn-kubernetes\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.485927 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-systemd-units\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.485947 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.485968 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-var-lib-openvswitch\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 
12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.485984 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-log-socket\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.486099 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-node-log\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.486294 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9qd2p\" (UniqueName: \"kubernetes.io/projected/be412f31-7a36-4811-8914-be8cdc987d08-kube-api-access-9qd2p\") pod \"machine-config-daemon-sx8lm\" (UID: \"be412f31-7a36-4811-8914-be8cdc987d08\") " pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.486356 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-run-openvswitch\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.486391 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/291f2a35-7dd5-4af9-87f0-caae4ef75c66-ovnkube-config\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.486423 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/291f2a35-7dd5-4af9-87f0-caae4ef75c66-ovn-node-metrics-cert\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.486454 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/be412f31-7a36-4811-8914-be8cdc987d08-rootfs\") pod \"machine-config-daemon-sx8lm\" (UID: \"be412f31-7a36-4811-8914-be8cdc987d08\") " pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.486525 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/291f2a35-7dd5-4af9-87f0-caae4ef75c66-env-overrides\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.486563 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/be412f31-7a36-4811-8914-be8cdc987d08-proxy-tls\") pod 
\"machine-config-daemon-sx8lm\" (UID: \"be412f31-7a36-4811-8914-be8cdc987d08\") " pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.486590 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-host-kubelet\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.486648 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gv9d9\" (UniqueName: \"kubernetes.io/projected/291f2a35-7dd5-4af9-87f0-caae4ef75c66-kube-api-access-gv9d9\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.487444 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.487479 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.487489 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.487506 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.487517 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:44Z","lastTransitionTime":"2025-12-05T12:25:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.570363 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\
\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastSt
ate\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vpljs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.587344 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/be412f31-7a36-4811-8914-be8cdc987d08-proxy-tls\") pod \"machine-config-daemon-sx8lm\" (UID: \"be412f31-7a36-4811-8914-be8cdc987d08\") " pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.587393 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-host-kubelet\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.587433 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/58eb6bc9-be04-4bd0-a0a1-4021cfc2095b-cni-binary-copy\") pod \"multus-additional-cni-plugins-vpljs\" (UID: \"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\") " pod="openshift-multus/multus-additional-cni-plugins-vpljs" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.587455 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gv9d9\" (UniqueName: \"kubernetes.io/projected/291f2a35-7dd5-4af9-87f0-caae4ef75c66-kube-api-access-gv9d9\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.587474 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/58eb6bc9-be04-4bd0-a0a1-4021cfc2095b-cnibin\") pod \"multus-additional-cni-plugins-vpljs\" (UID: \"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\") " pod="openshift-multus/multus-additional-cni-plugins-vpljs" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.587496 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-run-systemd\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.587594 4784 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-etc-openvswitch\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.587648 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-host-kubelet\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.587665 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/58eb6bc9-be04-4bd0-a0a1-4021cfc2095b-system-cni-dir\") pod \"multus-additional-cni-plugins-vpljs\" (UID: \"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\") " pod="openshift-multus/multus-additional-cni-plugins-vpljs" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.587677 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-run-systemd\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.587706 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-etc-openvswitch\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.587777 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-run-ovn\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.587836 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ntgll\" (UniqueName: \"kubernetes.io/projected/58eb6bc9-be04-4bd0-a0a1-4021cfc2095b-kube-api-access-ntgll\") pod \"multus-additional-cni-plugins-vpljs\" (UID: \"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\") " pod="openshift-multus/multus-additional-cni-plugins-vpljs" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.587865 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-run-ovn\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.587944 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-host-cni-bin\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.588310 4784 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/291f2a35-7dd5-4af9-87f0-caae4ef75c66-ovnkube-script-lib\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.588343 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/be412f31-7a36-4811-8914-be8cdc987d08-mcd-auth-proxy-config\") pod \"machine-config-daemon-sx8lm\" (UID: \"be412f31-7a36-4811-8914-be8cdc987d08\") " pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.588366 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-host-slash\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.588409 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-host-run-netns\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.588432 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-host-slash\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.588432 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-host-cni-netd\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.588468 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-host-run-netns\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.588464 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-host-cni-netd\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.587971 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-host-cni-bin\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.588505 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-host-run-ovn-kubernetes\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.588539 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/58eb6bc9-be04-4bd0-a0a1-4021cfc2095b-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-vpljs\" (UID: \"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\") " pod="openshift-multus/multus-additional-cni-plugins-vpljs" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.588577 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-systemd-units\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.588586 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-host-run-ovn-kubernetes\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.588606 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.588628 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.588613 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-systemd-units\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.588637 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/58eb6bc9-be04-4bd0-a0a1-4021cfc2095b-tuning-conf-dir\") pod \"multus-additional-cni-plugins-vpljs\" (UID: \"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\") " pod="openshift-multus/multus-additional-cni-plugins-vpljs" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.588677 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-var-lib-openvswitch\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 
12:25:44.588696 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-log-socket\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.588731 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-node-log\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.588746 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9qd2p\" (UniqueName: \"kubernetes.io/projected/be412f31-7a36-4811-8914-be8cdc987d08-kube-api-access-9qd2p\") pod \"machine-config-daemon-sx8lm\" (UID: \"be412f31-7a36-4811-8914-be8cdc987d08\") " pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.588768 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-run-openvswitch\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.588782 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/291f2a35-7dd5-4af9-87f0-caae4ef75c66-ovnkube-config\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.588798 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/291f2a35-7dd5-4af9-87f0-caae4ef75c66-ovn-node-metrics-cert\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.588813 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/58eb6bc9-be04-4bd0-a0a1-4021cfc2095b-os-release\") pod \"multus-additional-cni-plugins-vpljs\" (UID: \"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\") " pod="openshift-multus/multus-additional-cni-plugins-vpljs" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.588848 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/be412f31-7a36-4811-8914-be8cdc987d08-rootfs\") pod \"machine-config-daemon-sx8lm\" (UID: \"be412f31-7a36-4811-8914-be8cdc987d08\") " pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.588867 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/291f2a35-7dd5-4af9-87f0-caae4ef75c66-env-overrides\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.588938 
4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-var-lib-openvswitch\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.588971 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-run-openvswitch\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.589053 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/be412f31-7a36-4811-8914-be8cdc987d08-rootfs\") pod \"machine-config-daemon-sx8lm\" (UID: \"be412f31-7a36-4811-8914-be8cdc987d08\") " pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.589071 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/be412f31-7a36-4811-8914-be8cdc987d08-mcd-auth-proxy-config\") pod \"machine-config-daemon-sx8lm\" (UID: \"be412f31-7a36-4811-8914-be8cdc987d08\") " pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.589111 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-node-log\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.589080 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-log-socket\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.590688 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.590721 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.590731 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.590753 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.590765 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:44Z","lastTransitionTime":"2025-12-05T12:25:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.597611 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba1e93b1bb1f04a0c36b81190dc5ba99e09cff796f519b55340c78d2d6305a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.618163 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9qd2p\" (UniqueName: \"kubernetes.io/projected/be412f31-7a36-4811-8914-be8cdc987d08-kube-api-access-9qd2p\") pod \"machine-config-daemon-sx8lm\" (UID: \"be412f31-7a36-4811-8914-be8cdc987d08\") " pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.632859 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.653945 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g5gv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759cb09f-42c3-4254-82f8-b5285b61012a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rpk8f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g5gv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.680925 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"291f2a35-7dd5-4af9-87f0-caae4ef75c66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fxbpl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.690394 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/58eb6bc9-be04-4bd0-a0a1-4021cfc2095b-os-release\") pod \"multus-additional-cni-plugins-vpljs\" (UID: \"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\") " pod="openshift-multus/multus-additional-cni-plugins-vpljs" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.690479 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/58eb6bc9-be04-4bd0-a0a1-4021cfc2095b-cni-binary-copy\") pod \"multus-additional-cni-plugins-vpljs\" (UID: \"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\") " pod="openshift-multus/multus-additional-cni-plugins-vpljs" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.690516 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/58eb6bc9-be04-4bd0-a0a1-4021cfc2095b-cnibin\") pod \"multus-additional-cni-plugins-vpljs\" (UID: \"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\") " pod="openshift-multus/multus-additional-cni-plugins-vpljs" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.690544 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/58eb6bc9-be04-4bd0-a0a1-4021cfc2095b-system-cni-dir\") pod \"multus-additional-cni-plugins-vpljs\" (UID: \"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\") " pod="openshift-multus/multus-additional-cni-plugins-vpljs" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.690570 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ntgll\" (UniqueName: \"kubernetes.io/projected/58eb6bc9-be04-4bd0-a0a1-4021cfc2095b-kube-api-access-ntgll\") pod \"multus-additional-cni-plugins-vpljs\" (UID: \"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\") " pod="openshift-multus/multus-additional-cni-plugins-vpljs" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.690612 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/58eb6bc9-be04-4bd0-a0a1-4021cfc2095b-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-vpljs\" (UID: \"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\") " pod="openshift-multus/multus-additional-cni-plugins-vpljs" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.690637 4784 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/58eb6bc9-be04-4bd0-a0a1-4021cfc2095b-tuning-conf-dir\") pod \"multus-additional-cni-plugins-vpljs\" (UID: \"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\") " pod="openshift-multus/multus-additional-cni-plugins-vpljs" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.690780 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/58eb6bc9-be04-4bd0-a0a1-4021cfc2095b-tuning-conf-dir\") pod \"multus-additional-cni-plugins-vpljs\" (UID: \"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\") " pod="openshift-multus/multus-additional-cni-plugins-vpljs" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.690883 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/58eb6bc9-be04-4bd0-a0a1-4021cfc2095b-os-release\") pod \"multus-additional-cni-plugins-vpljs\" (UID: \"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\") " pod="openshift-multus/multus-additional-cni-plugins-vpljs" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.691112 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/58eb6bc9-be04-4bd0-a0a1-4021cfc2095b-system-cni-dir\") pod \"multus-additional-cni-plugins-vpljs\" (UID: \"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\") " pod="openshift-multus/multus-additional-cni-plugins-vpljs" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.691251 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/58eb6bc9-be04-4bd0-a0a1-4021cfc2095b-cnibin\") pod \"multus-additional-cni-plugins-vpljs\" (UID: \"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\") " pod="openshift-multus/multus-additional-cni-plugins-vpljs" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.691916 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/58eb6bc9-be04-4bd0-a0a1-4021cfc2095b-cni-binary-copy\") pod \"multus-additional-cni-plugins-vpljs\" (UID: \"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\") " pod="openshift-multus/multus-additional-cni-plugins-vpljs" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.693918 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.694180 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.694243 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.694265 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.694304 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:44Z","lastTransitionTime":"2025-12-05T12:25:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.696631 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 12:25:38.416897 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 12:25:38.417058 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:25:38.422141 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1079216912/tls.crt::/tmp/serving-cert-1079216912/tls.key\\\\\\\"\\\\nI1205 12:25:38.771082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:25:38.774729 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:25:38.774756 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:25:38.774788 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:25:38.774811 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:25:38.780280 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:25:38.780316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780321 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:25:38.780329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:25:38.780332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:25:38.780335 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:25:38.780478 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:25:38.783020 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.709868 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ntgll\" (UniqueName: \"kubernetes.io/projected/58eb6bc9-be04-4bd0-a0a1-4021cfc2095b-kube-api-access-ntgll\") pod \"multus-additional-cni-plugins-vpljs\" (UID: \"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\") " pod="openshift-multus/multus-additional-cni-plugins-vpljs" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.710216 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a95e925e1cf25326e556267b9b43fcff4b12eafe8aeb03b577a01ed8f0d98ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.726380 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xpw77" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"beba5d67-ad2e-4968-91da-3f451dd2cdc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjrpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xpw77\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.739216 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be412f31-7a36-4811-8914-be8cdc987d08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sx8lm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.755769 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d237806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.771792 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.785407 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf86c4db02b1091f887fc7dc73c7e2e7bb8d03e7212fb8bd443a43c4f51e976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87cb87114139d85aeacf41902c2c61f7ccb3cef1f22892820a521390c2a64d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.797460 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.797502 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.797512 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.797529 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.797540 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:44Z","lastTransitionTime":"2025-12-05T12:25:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.801502 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:44Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.900081 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.900144 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.900158 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.900180 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.900216 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:44Z","lastTransitionTime":"2025-12-05T12:25:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.998512 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:25:44 crc kubenswrapper[4784]: I1205 12:25:44.998598 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:25:44 crc kubenswrapper[4784]: E1205 12:25:44.998786 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:25:44 crc kubenswrapper[4784]: E1205 12:25:44.999003 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.002084 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.002137 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.002150 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.002169 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.002202 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:45Z","lastTransitionTime":"2025-12-05T12:25:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.104312 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.104357 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.104365 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.104380 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.104393 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:45Z","lastTransitionTime":"2025-12-05T12:25:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.199997 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-xpw77" event={"ID":"beba5d67-ad2e-4968-91da-3f451dd2cdc9","Type":"ContainerStarted","Data":"52618f95fb8efa7ff3baa650dcf8f3373b7d17bc27138fa03e49c68792adf36c"} Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.200060 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-xpw77" event={"ID":"beba5d67-ad2e-4968-91da-3f451dd2cdc9","Type":"ContainerStarted","Data":"3ebd878910779b591a467b15275efeac548da0176e4acc53f25be1ed0f3cbcec"} Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.201416 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-g5gv5" event={"ID":"759cb09f-42c3-4254-82f8-b5285b61012a","Type":"ContainerStarted","Data":"8c44657f4f0a70fc2d2f1dcc08f1ca23c5fe94e77d3dbdd1bbb3f36f6471a62d"} Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.201447 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-g5gv5" event={"ID":"759cb09f-42c3-4254-82f8-b5285b61012a","Type":"ContainerStarted","Data":"8eba044e790323bb9d0c348180b16627bd5710daf095c0ba87d74f5ee10a7a89"} Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.207213 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.207256 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.207268 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.207287 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.207298 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:45Z","lastTransitionTime":"2025-12-05T12:25:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.217401 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.239413 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf86c4db02b1091f887fc7dc73c7e2e7bb8d03e7212fb8bd443a43c4f51e976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87cb87114139d85aeacf41902c2c61f7ccb3cef1f22892820a521390c2a64d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.256411 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.264331 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.270379 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d237806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.273201 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/58eb6bc9-be04-4bd0-a0a1-4021cfc2095b-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-vpljs\" (UID: \"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\") " pod="openshift-multus/multus-additional-cni-plugins-vpljs" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.286568 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vpljs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.298897 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.301597 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g5gv5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"759cb09f-42c3-4254-82f8-b5285b61012a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rpk8f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g5gv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.310109 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.310153 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.310163 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.310200 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.310216 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:45Z","lastTransitionTime":"2025-12-05T12:25:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.320532 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"291f2a35-7dd5-4af9-87f0-caae4ef75c66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fxbpl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.336239 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba1e93b1bb1f04a0c36b81190dc5ba99e09cff796f519b55340c78d2d6305a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.350233 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.364423 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.364908 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a95e925e1cf25326e556267b9b43fcff4b12eafe8aeb03b577a01ed8f0d98ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.370753 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-vpljs" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.375904 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xpw77" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"beba5d67-ad2e-4968-91da-3f451dd2cdc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52618f95fb8efa7ff3baa650dcf8f3373b7d17bc27138fa03e49c68792adf36c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjrpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xpw77\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:45 crc kubenswrapper[4784]: W1205 12:25:45.384402 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod58eb6bc9_be04_4bd0_a0a1_4021cfc2095b.slice/crio-eb54ac25facfd555933671abea006842c4b12806dafce5ad3deb0f4efcbc7412 WatchSource:0}: Error finding container eb54ac25facfd555933671abea006842c4b12806dafce5ad3deb0f4efcbc7412: Status 404 returned error can't find the container with id eb54ac25facfd555933671abea006842c4b12806dafce5ad3deb0f4efcbc7412 Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.391072 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be412f31-7a36-4811-8914-be8cdc987d08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sx8lm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.410702 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 12:25:38.416897 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 12:25:38.417058 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:25:38.422141 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1079216912/tls.crt::/tmp/serving-cert-1079216912/tls.key\\\\\\\"\\\\nI1205 12:25:38.771082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:25:38.774729 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:25:38.774756 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:25:38.774788 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:25:38.774811 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:25:38.780280 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:25:38.780316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780321 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:25:38.780329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:25:38.780332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:25:38.780335 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:25:38.780478 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:25:38.783020 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.412375 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.413018 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.413053 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.413068 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.413086 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeNotReady" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.413099 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:45Z","lastTransitionTime":"2025-12-05T12:25:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.428753 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vpljs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.428859 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.441779 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/be412f31-7a36-4811-8914-be8cdc987d08-proxy-tls\") pod \"machine-config-daemon-sx8lm\" 
(UID: \"be412f31-7a36-4811-8914-be8cdc987d08\") " pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.443793 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba1e93b1bb1f04a0c36b81190dc5ba99e09cff796f519b55340c78d2d6305a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.456116 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.468807 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g5gv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759cb09f-42c3-4254-82f8-b5285b61012a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c44657f4f0a70fc2d2f1dcc08f1ca23c5fe94e77d3dbdd1bbb3f36f6471a62d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cn
i/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rpk8f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g5gv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.489800 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"291f2a35-7dd5-4af9-87f0-caae4ef75c66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fxbpl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.504466 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 12:25:38.416897 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 12:25:38.417058 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:25:38.422141 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1079216912/tls.crt::/tmp/serving-cert-1079216912/tls.key\\\\\\\"\\\\nI1205 12:25:38.771082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:25:38.774729 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:25:38.774756 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:25:38.774788 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:25:38.774811 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:25:38.780280 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:25:38.780316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780321 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:25:38.780329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:25:38.780332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:25:38.780335 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:25:38.780478 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:25:38.783020 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.516013 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.516057 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.516094 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.516471 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.516561 4784 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:45Z","lastTransitionTime":"2025-12-05T12:25:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.521381 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a95e925e1cf25326e556267b9b43fcff4b12eafe8aeb03b577a01ed8f0d98ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.534732 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xpw77" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"beba5d67-ad2e-4968-91da-3f451dd2cdc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52618f95fb8efa7ff3baa650dcf8f3373b7d17bc27138fa03e49c68792adf36c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjrpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xpw77\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.546937 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be412f31-7a36-4811-8914-be8cdc987d08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sx8lm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.562912 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d237806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.577542 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:45Z is after 2025-08-24T17:21:41Z"
Dec 05 12:25:45 crc kubenswrapper[4784]: E1205 12:25:45.588623 4784 configmap.go:193] Couldn't get configMap openshift-ovn-kubernetes/ovnkube-script-lib: failed to sync configmap cache: timed out waiting for the condition
Dec 05 12:25:45 crc kubenswrapper[4784]: E1205 12:25:45.588721 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/291f2a35-7dd5-4af9-87f0-caae4ef75c66-ovnkube-script-lib podName:291f2a35-7dd5-4af9-87f0-caae4ef75c66 nodeName:}" failed. No retries permitted until 2025-12-05 12:25:46.088694989 +0000 UTC m=+25.508761804 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "ovnkube-script-lib" (UniqueName: "kubernetes.io/configmap/291f2a35-7dd5-4af9-87f0-caae4ef75c66-ovnkube-script-lib") pod "ovnkube-node-fxbpl" (UID: "291f2a35-7dd5-4af9-87f0-caae4ef75c66") : failed to sync configmap cache: timed out waiting for the condition
Dec 05 12:25:45 crc kubenswrapper[4784]: E1205 12:25:45.589296 4784 configmap.go:193] Couldn't get configMap openshift-ovn-kubernetes/env-overrides: failed to sync configmap cache: timed out waiting for the condition
Dec 05 12:25:45 crc kubenswrapper[4784]: E1205 12:25:45.589300 4784 secret.go:188] Couldn't get secret openshift-ovn-kubernetes/ovn-node-metrics-cert: failed to sync secret cache: timed out waiting for the condition
Dec 05 12:25:45 crc kubenswrapper[4784]: E1205 12:25:45.589333 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/291f2a35-7dd5-4af9-87f0-caae4ef75c66-env-overrides podName:291f2a35-7dd5-4af9-87f0-caae4ef75c66 nodeName:}" failed. No retries permitted until 2025-12-05 12:25:46.089322968 +0000 UTC m=+25.509389783 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "env-overrides" (UniqueName: "kubernetes.io/configmap/291f2a35-7dd5-4af9-87f0-caae4ef75c66-env-overrides") pod "ovnkube-node-fxbpl" (UID: "291f2a35-7dd5-4af9-87f0-caae4ef75c66") : failed to sync configmap cache: timed out waiting for the condition
Dec 05 12:25:45 crc kubenswrapper[4784]: E1205 12:25:45.589377 4784 configmap.go:193] Couldn't get configMap openshift-ovn-kubernetes/ovnkube-config: failed to sync configmap cache: timed out waiting for the condition
Dec 05 12:25:45 crc kubenswrapper[4784]: E1205 12:25:45.589448 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/291f2a35-7dd5-4af9-87f0-caae4ef75c66-ovn-node-metrics-cert podName:291f2a35-7dd5-4af9-87f0-caae4ef75c66 nodeName:}" failed. No retries permitted until 2025-12-05 12:25:46.089425672 +0000 UTC m=+25.509492487 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "ovn-node-metrics-cert" (UniqueName: "kubernetes.io/secret/291f2a35-7dd5-4af9-87f0-caae4ef75c66-ovn-node-metrics-cert") pod "ovnkube-node-fxbpl" (UID: "291f2a35-7dd5-4af9-87f0-caae4ef75c66") : failed to sync secret cache: timed out waiting for the condition
Dec 05 12:25:45 crc kubenswrapper[4784]: E1205 12:25:45.589538 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/291f2a35-7dd5-4af9-87f0-caae4ef75c66-ovnkube-config podName:291f2a35-7dd5-4af9-87f0-caae4ef75c66 nodeName:}" failed. No retries permitted until 2025-12-05 12:25:46.089507924 +0000 UTC m=+25.509574729 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "ovnkube-config" (UniqueName: "kubernetes.io/configmap/291f2a35-7dd5-4af9-87f0-caae4ef75c66-ovnkube-config") pod "ovnkube-node-fxbpl" (UID: "291f2a35-7dd5-4af9-87f0-caae4ef75c66") : failed to sync configmap cache: timed out waiting for the condition
Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.589892 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config"
Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.591607 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf86c4db02b1091f887fc7dc73c7e2e7bb8d03e7212fb8bd443a43c4f51e976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87cb87114139d85aeacf41902c2c61f7ccb3cef1f22892820a521390c2a64d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.605890 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:45Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.623569 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.623651 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.623668 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.623694 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.623713 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:45Z","lastTransitionTime":"2025-12-05T12:25:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.696393 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt"
Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.703442 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gv9d9\" (UniqueName: \"kubernetes.io/projected/291f2a35-7dd5-4af9-87f0-caae4ef75c66-kube-api-access-gv9d9\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl"
Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.726239 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.726285 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.726296 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.726314 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.726327 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:45Z","lastTransitionTime":"2025-12-05T12:25:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.732560 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib"
Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.736994 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides"
Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.828745 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.828800 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.828810 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.828830 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.828842 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:45Z","lastTransitionTime":"2025-12-05T12:25:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.832546 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq"
Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.837736 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm"
Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.931963 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.932003 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.932013 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.932031 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.932043 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:45Z","lastTransitionTime":"2025-12-05T12:25:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.964807 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert"
Dec 05 12:25:45 crc kubenswrapper[4784]: I1205 12:25:45.998030 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 12:25:45 crc kubenswrapper[4784]: E1205 12:25:45.998162 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.034639 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.034678 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.034687 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.034701 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.034711 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:46Z","lastTransitionTime":"2025-12-05T12:25:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.071973 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-dzwxp"]
Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.072489 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-dzwxp"
Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.075758 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt"
Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.076047 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt"
Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.076496 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates"
Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.076816 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p"
Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.097258 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"291f2a35-7dd5-4af9-87f0-caae4ef75c66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node
kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\
\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\
",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fxbpl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:46Z is after 
2025-08-24T17:21:41Z" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.104036 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/291f2a35-7dd5-4af9-87f0-caae4ef75c66-ovnkube-config\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.104076 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/291f2a35-7dd5-4af9-87f0-caae4ef75c66-ovn-node-metrics-cert\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.104103 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2f8364fb-1be8-4baa-aff0-10d4a4e8d614-host\") pod \"node-ca-dzwxp\" (UID: \"2f8364fb-1be8-4baa-aff0-10d4a4e8d614\") " pod="openshift-image-registry/node-ca-dzwxp" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.104143 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/291f2a35-7dd5-4af9-87f0-caae4ef75c66-env-overrides\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.104176 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/2f8364fb-1be8-4baa-aff0-10d4a4e8d614-serviceca\") pod \"node-ca-dzwxp\" (UID: \"2f8364fb-1be8-4baa-aff0-10d4a4e8d614\") " pod="openshift-image-registry/node-ca-dzwxp" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.104397 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/291f2a35-7dd5-4af9-87f0-caae4ef75c66-ovnkube-script-lib\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.104492 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x2xfq\" (UniqueName: \"kubernetes.io/projected/2f8364fb-1be8-4baa-aff0-10d4a4e8d614-kube-api-access-x2xfq\") pod \"node-ca-dzwxp\" (UID: \"2f8364fb-1be8-4baa-aff0-10d4a4e8d614\") " pod="openshift-image-registry/node-ca-dzwxp" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.104779 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/291f2a35-7dd5-4af9-87f0-caae4ef75c66-env-overrides\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.105240 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/291f2a35-7dd5-4af9-87f0-caae4ef75c66-ovnkube-config\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.105454 
4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/291f2a35-7dd5-4af9-87f0-caae4ef75c66-ovnkube-script-lib\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.109013 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/291f2a35-7dd5-4af9-87f0-caae4ef75c66-ovn-node-metrics-cert\") pod \"ovnkube-node-fxbpl\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.114478 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dzwxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f8364fb-1be8-4baa-aff0-10d4a4e8d614\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2xfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dzwxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.129445 4784 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba1e93b1bb1f04a0c36b81190dc5ba99e09cff796f519b55340c78d2d6305a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.136895 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.136953 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.136967 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.136984 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.137001 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:46Z","lastTransitionTime":"2025-12-05T12:25:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.143296 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.154937 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g5gv5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"759cb09f-42c3-4254-82f8-b5285b61012a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c44657f4f0a70fc2d2f1dcc08f1ca23c5fe94e77d3dbdd1bbb3f36f6471a62d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rpk8f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g5gv5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.166091 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xpw77" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"beba5d67-ad2e-4968-91da-3f451dd2cdc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52618f95fb8efa7ff3baa650dcf8f3373b7d17bc27138fa03e49c68792adf36c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjrpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xpw77\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.181770 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be412f31-7a36-4811-8914-be8cdc987d08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sx8lm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.195809 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 12:25:38.416897 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 12:25:38.417058 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:25:38.422141 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1079216912/tls.crt::/tmp/serving-cert-1079216912/tls.key\\\\\\\"\\\\nI1205 12:25:38.771082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:25:38.774729 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:25:38.774756 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:25:38.774788 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:25:38.774811 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:25:38.780280 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:25:38.780316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780321 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:25:38.780329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:25:38.780332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:25:38.780335 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:25:38.780478 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:25:38.783020 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.203476 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.204998 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x2xfq\" (UniqueName: \"kubernetes.io/projected/2f8364fb-1be8-4baa-aff0-10d4a4e8d614-kube-api-access-x2xfq\") pod \"node-ca-dzwxp\" (UID: \"2f8364fb-1be8-4baa-aff0-10d4a4e8d614\") " pod="openshift-image-registry/node-ca-dzwxp" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.205050 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2f8364fb-1be8-4baa-aff0-10d4a4e8d614-host\") pod \"node-ca-dzwxp\" (UID: \"2f8364fb-1be8-4baa-aff0-10d4a4e8d614\") " pod="openshift-image-registry/node-ca-dzwxp" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.205092 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/2f8364fb-1be8-4baa-aff0-10d4a4e8d614-serviceca\") pod \"node-ca-dzwxp\" (UID: \"2f8364fb-1be8-4baa-aff0-10d4a4e8d614\") " pod="openshift-image-registry/node-ca-dzwxp" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.206020 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/2f8364fb-1be8-4baa-aff0-10d4a4e8d614-serviceca\") pod \"node-ca-dzwxp\" (UID: \"2f8364fb-1be8-4baa-aff0-10d4a4e8d614\") " pod="openshift-image-registry/node-ca-dzwxp" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.206359 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2f8364fb-1be8-4baa-aff0-10d4a4e8d614-host\") pod \"node-ca-dzwxp\" (UID: \"2f8364fb-1be8-4baa-aff0-10d4a4e8d614\") " pod="openshift-image-registry/node-ca-dzwxp" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.218606 4784 generic.go:334] "Generic (PLEG): container finished" podID="58eb6bc9-be04-4bd0-a0a1-4021cfc2095b" containerID="1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e" exitCode=0 Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.218682 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" event={"ID":"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b","Type":"ContainerDied","Data":"1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e"} Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.218737 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" event={"ID":"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b","Type":"ContainerStarted","Data":"eb54ac25facfd555933671abea006842c4b12806dafce5ad3deb0f4efcbc7412"} Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.234165 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a95e925e1cf25326e556267b9b43fcff4b12eafe8aeb03b577a01ed8f0d98ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.234816 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerStarted","Data":"76ddca8a6cc5af5e792bf0f50c2bc3cfcec5ea40705126799786373ad5d18e8e"} Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.234923 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerStarted","Data":"45f31312230c7d56492145f94ee5dfda841a79b7d35cf4139b4fc09eacf070fe"} Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.234939 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerStarted","Data":"2c8a4e59155a81134628de754be774cfbb1fb4f4193f66593bb9b7881053f6e7"} Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.243546 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x2xfq\" (UniqueName: \"kubernetes.io/projected/2f8364fb-1be8-4baa-aff0-10d4a4e8d614-kube-api-access-x2xfq\") pod \"node-ca-dzwxp\" (UID: \"2f8364fb-1be8-4baa-aff0-10d4a4e8d614\") " pod="openshift-image-registry/node-ca-dzwxp" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.245328 4784 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.245362 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.245371 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.245386 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.245395 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:46Z","lastTransitionTime":"2025-12-05T12:25:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.257095 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf86c4db02b1091f887fc7dc73c7e2e7bb8d03e7212fb8bd443a43c4f51e976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87cb87114139d85aeacf41902c2c61f7ccb3cef1f22892820a521390c2a64d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\
\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.272492 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.288962 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\
\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d237806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.306579 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.323267 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vpljs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.338259 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.353013 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf86c4db02b1091f887fc7dc73c7e2e7bb8d03e7212fb8bd443a43c4f51e976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87cb87114139d85aeacf41902c2c61f7ccb3cef1f22892820a521390c2a64d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.353151 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.354816 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.354850 4784 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.356212 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.356253 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:46Z","lastTransitionTime":"2025-12-05T12:25:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.368317 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.381799 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\
\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d237806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.386972 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-dzwxp" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.396680 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"m
ountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-rel
ease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vpljs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.413600 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.428463 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g5gv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759cb09f-42c3-4254-82f8-b5285b61012a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c44657f4f0a70fc2d2f1dcc08f1ca23c5fe94e77d3dbdd1bbb3f36f6471a62d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rpk8f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g5gv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.448018 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"291f2a35-7dd5-4af9-87f0-caae4ef75c66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fxbpl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.459694 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.459734 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 
12:25:46.459746 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.459765 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.459778 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:46Z","lastTransitionTime":"2025-12-05T12:25:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.463754 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dzwxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f8364fb-1be8-4baa-aff0-10d4a4e8d614\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2xfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dzwxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.480246 
4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba1e93b1bb1f04a0c36b81190dc5ba99e09cff796f519b55340c78d2d6305a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.495092 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 12:25:38.416897 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 12:25:38.417058 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:25:38.422141 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1079216912/tls.crt::/tmp/serving-cert-1079216912/tls.key\\\\\\\"\\\\nI1205 12:25:38.771082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:25:38.774729 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:25:38.774756 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:25:38.774788 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:25:38.774811 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:25:38.780280 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:25:38.780316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780321 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:25:38.780329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:25:38.780332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:25:38.780335 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:25:38.780478 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:25:38.783020 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.506645 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a95e925e1cf25326e556267b9b43fcff4b12eafe8aeb03b577a01ed8f0d98ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.519261 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xpw77" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"beba5d67-ad2e-4968-91da-3f451dd2cdc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52618f95fb8efa7ff3baa650dcf8f3373b7d17bc27138fa03e49c68792adf36c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjrpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xpw77\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.533068 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be412f31-7a36-4811-8914-be8cdc987d08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76ddca8a6cc5af5e792bf0f50c2bc3cfcec5ea40705126799786373ad5d18e8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45f31312230c7d56492145f94ee5dfda841a79b7d35cf4139b4fc09eacf070fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sx8lm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:46Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.563136 4784 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.563177 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.563209 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.563227 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.563235 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:46Z","lastTransitionTime":"2025-12-05T12:25:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.666051 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.666461 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.666474 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.666491 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.666502 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:46Z","lastTransitionTime":"2025-12-05T12:25:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:46 crc kubenswrapper[4784]: E1205 12:25:46.711555 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:25:54.711522969 +0000 UTC m=+34.131589784 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.711286 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.769584 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.769632 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.769642 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.769659 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.769670 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:46Z","lastTransitionTime":"2025-12-05T12:25:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.812465 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.812509 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.812530 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.812550 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:25:46 crc kubenswrapper[4784]: E1205 12:25:46.812630 4784 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 12:25:46 crc kubenswrapper[4784]: E1205 12:25:46.812632 4784 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 12:25:46 crc kubenswrapper[4784]: E1205 12:25:46.812646 4784 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 12:25:46 crc kubenswrapper[4784]: E1205 12:25:46.812654 4784 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 12:25:46 crc kubenswrapper[4784]: E1205 12:25:46.812659 4784 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:25:46 crc kubenswrapper[4784]: E1205 12:25:46.812664 4784 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:25:46 crc kubenswrapper[4784]: E1205 
12:25:46.812680 4784 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 12:25:46 crc kubenswrapper[4784]: E1205 12:25:46.812711 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 12:25:54.812695884 +0000 UTC m=+34.232762699 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:25:46 crc kubenswrapper[4784]: E1205 12:25:46.812726 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 12:25:54.812720945 +0000 UTC m=+34.232787760 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:25:46 crc kubenswrapper[4784]: E1205 12:25:46.812766 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 12:25:54.812744286 +0000 UTC m=+34.232811101 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 12:25:46 crc kubenswrapper[4784]: E1205 12:25:46.812680 4784 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 12:25:46 crc kubenswrapper[4784]: E1205 12:25:46.812793 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 12:25:54.812787737 +0000 UTC m=+34.232854552 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.872706 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.872747 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.872759 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.872777 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.872792 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:46Z","lastTransitionTime":"2025-12-05T12:25:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.975211 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.975248 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.975258 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.975273 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.975286 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:46Z","lastTransitionTime":"2025-12-05T12:25:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.998107 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:25:46 crc kubenswrapper[4784]: I1205 12:25:46.998164 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:25:46 crc kubenswrapper[4784]: E1205 12:25:46.998305 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:25:46 crc kubenswrapper[4784]: E1205 12:25:46.998404 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.077325 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.077375 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.077386 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.077402 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.077415 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:47Z","lastTransitionTime":"2025-12-05T12:25:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.181443 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.181826 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.181835 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.181851 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.181861 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:47Z","lastTransitionTime":"2025-12-05T12:25:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.240560 4784 generic.go:334] "Generic (PLEG): container finished" podID="58eb6bc9-be04-4bd0-a0a1-4021cfc2095b" containerID="6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6" exitCode=0 Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.240650 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" event={"ID":"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b","Type":"ContainerDied","Data":"6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6"} Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.242914 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-dzwxp" event={"ID":"2f8364fb-1be8-4baa-aff0-10d4a4e8d614","Type":"ContainerStarted","Data":"01f0d98ad3b8357dc9296635ef627f67300f4bbdd8131491552ba43830d1e658"} Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.242967 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-dzwxp" event={"ID":"2f8364fb-1be8-4baa-aff0-10d4a4e8d614","Type":"ContainerStarted","Data":"092bf0e3ba3f2ce9c09f8af1d212963ba2f8e7de1a27d4f71cf6f1d573ac8a9d"} Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.244288 4784 generic.go:334] "Generic (PLEG): container finished" podID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerID="9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a" exitCode=0 Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.244318 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" event={"ID":"291f2a35-7dd5-4af9-87f0-caae4ef75c66","Type":"ContainerDied","Data":"9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a"} Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.244345 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" event={"ID":"291f2a35-7dd5-4af9-87f0-caae4ef75c66","Type":"ContainerStarted","Data":"a5b2c66dee0e25112b9df888c2404f4f856ad3027d4cacf746ce6c499e7a9aab"} Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.260820 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/r
un/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vpljs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:47Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.279408 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba1e93b1bb1f04a0c36b81190dc5ba99e09cff796f519b55340c78d2d6305a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:47Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.284567 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.284608 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.284617 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:47 crc 
kubenswrapper[4784]: I1205 12:25:47.284636 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.284649 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:47Z","lastTransitionTime":"2025-12-05T12:25:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.293496 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:47Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.308114 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g5gv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759cb09f-42c3-4254-82f8-b5285b61012a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c44657f4f0a70fc2d2f1dcc08f1ca23c5fe94e77d3dbdd1bbb3f36f6471a62d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rpk8f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g5gv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:47Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.332098 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"291f2a35-7dd5-4af9-87f0-caae4ef75c66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fxbpl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:47Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.348042 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dzwxp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f8364fb-1be8-4baa-aff0-10d4a4e8d614\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2xfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dzwxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:47Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.362893 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 12:25:38.416897 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 12:25:38.417058 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:25:38.422141 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1079216912/tls.crt::/tmp/serving-cert-1079216912/tls.key\\\\\\\"\\\\nI1205 12:25:38.771082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:25:38.774729 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:25:38.774756 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:25:38.774788 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:25:38.774811 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:25:38.780280 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:25:38.780316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780321 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:25:38.780329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:25:38.780332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:25:38.780335 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:25:38.780478 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:25:38.783020 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:47Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.380298 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a95e925e1cf25326e556267b9b43fcff4b12eafe8aeb03b577a01ed8f0d98ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:47Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.386456 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.386490 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.386499 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.386514 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.386524 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:47Z","lastTransitionTime":"2025-12-05T12:25:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.393054 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xpw77" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"beba5d67-ad2e-4968-91da-3f451dd2cdc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52618f95fb8efa7ff3baa650dcf8f3373b7d17bc27138fa03e49c68792adf36c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjrpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xpw77\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:47Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.404391 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be412f31-7a36-4811-8914-be8cdc987d08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76ddca8a6cc5af5e792bf0f50c2bc3cfcec5ea40705126799786373ad5d18e8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45f31312230c7d56492145f94ee5dfda841a79b7d35cf4139b4fc09eacf070fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sx8lm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:47Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.422038 4784 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d23
7806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:47Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.436415 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:47Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.451379 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf86c4db02b1091f887fc7dc73c7e2e7bb8d03e7212fb8bd443a43c4f51e976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87cb87114139d85aeacf41902c2c61f7ccb3cef1f22892820a521390c2a64d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:47Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.467650 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:47Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.483852 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 12:25:38.416897 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 12:25:38.417058 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:25:38.422141 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1079216912/tls.crt::/tmp/serving-cert-1079216912/tls.key\\\\\\\"\\\\nI1205 12:25:38.771082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:25:38.774729 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:25:38.774756 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:25:38.774788 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:25:38.774811 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:25:38.780280 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:25:38.780316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780321 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:25:38.780329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:25:38.780332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:25:38.780335 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:25:38.780478 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:25:38.783020 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:47Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.489859 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.489912 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.489927 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.489945 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.489955 4784 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:47Z","lastTransitionTime":"2025-12-05T12:25:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.499738 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a95e925e1cf25326e556267b9b43fcff4b12eafe8aeb03b577a01ed8f0d98ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:47Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.515497 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xpw77" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"beba5d67-ad2e-4968-91da-3f451dd2cdc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52618f95fb8efa7ff3baa650dcf8f3373b7d17bc27138fa03e49c68792adf36c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjrpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xpw77\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:47Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.529906 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be412f31-7a36-4811-8914-be8cdc987d08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76ddca8a6cc5af5e792bf0f50c2bc3cfcec5ea40705126799786373ad5d18e8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45f31312230c7d56492145f94ee5dfda841a79b7d35cf4139b4fc09eacf070fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sx8lm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:47Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.546987 4784 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d23
7806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:47Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.562605 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:47Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.578653 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf86c4db02b1091f887fc7dc73c7e2e7bb8d03e7212fb8bd443a43c4f51e976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87cb87114139d85aeacf41902c2c61f7ccb3cef1f22892820a521390c2a64d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:47Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.593226 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.593270 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.593281 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.593296 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.593306 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:47Z","lastTransitionTime":"2025-12-05T12:25:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.596244 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:47Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.611465 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-
05T12:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vpljs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:47Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.626706 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba1e93b1bb1f04a0c36b81190dc5ba99e09cff796f519b55340c78d2d6305a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:47Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.639958 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:47Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.660106 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g5gv5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"759cb09f-42c3-4254-82f8-b5285b61012a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c44657f4f0a70fc2d2f1dcc08f1ca23c5fe94e77d3dbdd1bbb3f36f6471a62d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rpk8f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g5gv5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:47Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.683462 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"291f2a35-7dd5-4af9-87f0-caae4ef75c66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fxbpl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:47Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.695593 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.695636 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.695648 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.695668 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.695688 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:47Z","lastTransitionTime":"2025-12-05T12:25:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.700449 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dzwxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f8364fb-1be8-4baa-aff0-10d4a4e8d614\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f0d98ad3b8357dc9296635ef627f67300f4bbdd8131491552ba43830d1e658\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2xfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dzwxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:47Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.797645 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.797695 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.797713 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.797733 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.797743 4784 setters.go:603] "Node became not 
ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:47Z","lastTransitionTime":"2025-12-05T12:25:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.899697 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.899736 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.899744 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.899759 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.899767 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:47Z","lastTransitionTime":"2025-12-05T12:25:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:47 crc kubenswrapper[4784]: I1205 12:25:47.997817 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:25:47 crc kubenswrapper[4784]: E1205 12:25:47.997945 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.002464 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.002490 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.002501 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.002515 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.002526 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:48Z","lastTransitionTime":"2025-12-05T12:25:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.105954 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.105998 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.106009 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.106031 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.106042 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:48Z","lastTransitionTime":"2025-12-05T12:25:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.209034 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.209076 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.209089 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.209105 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.209117 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:48Z","lastTransitionTime":"2025-12-05T12:25:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.250242 4784 generic.go:334] "Generic (PLEG): container finished" podID="58eb6bc9-be04-4bd0-a0a1-4021cfc2095b" containerID="d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2" exitCode=0 Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.250316 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" event={"ID":"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b","Type":"ContainerDied","Data":"d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2"} Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.254436 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" event={"ID":"291f2a35-7dd5-4af9-87f0-caae4ef75c66","Type":"ContainerStarted","Data":"77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd"} Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.254487 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" event={"ID":"291f2a35-7dd5-4af9-87f0-caae4ef75c66","Type":"ContainerStarted","Data":"fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d"} Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.254498 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" event={"ID":"291f2a35-7dd5-4af9-87f0-caae4ef75c66","Type":"ContainerStarted","Data":"6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd"} Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.254508 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" event={"ID":"291f2a35-7dd5-4af9-87f0-caae4ef75c66","Type":"ContainerStarted","Data":"3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a"} Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.254519 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" event={"ID":"291f2a35-7dd5-4af9-87f0-caae4ef75c66","Type":"ContainerStarted","Data":"b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262"} Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.254528 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" event={"ID":"291f2a35-7dd5-4af9-87f0-caae4ef75c66","Type":"ContainerStarted","Data":"c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966"} Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.272347 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"291f2a35-7dd5-4af9-87f0-caae4ef75c66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller 
ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\"
:\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"}
,{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\
\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fxbpl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.283685 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dzwxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f8364fb-1be8-4baa-aff0-10d4a4e8d614\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f0d98ad3b8357dc9296635ef627f67300f4bbdd8131491552ba43830d1e658\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2xfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dzwxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.298106 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba1e93b1bb1f04a0c36b81190dc5ba99e09cff796f519b55340c78d2d6305a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.313407 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.315659 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.315687 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.315695 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.315709 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.315719 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:48Z","lastTransitionTime":"2025-12-05T12:25:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.325645 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g5gv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759cb09f-42c3-4254-82f8-b5285b61012a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c44657f4f0a70fc2d2f1dcc08f1ca23c5fe94e77d3dbdd1bbb3f36f6471a62d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rpk8f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g5gv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.335570 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xpw77" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"beba5d67-ad2e-4968-91da-3f451dd2cdc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52618f95fb8efa7ff3baa650dcf8f3373b7d17bc27138fa03e49c68792adf36c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjrpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xpw77\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.346715 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be412f31-7a36-4811-8914-be8cdc987d08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76ddca8a6cc5af5e792bf0f50c2bc3cfcec5ea40705126799786373ad5d18e8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45f31312230c7d56492145f94ee5dfda841a79b7d35cf4139b4fc09eacf070fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sx8lm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.363223 4784 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 12:25:38.416897 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 12:25:38.417058 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:25:38.422141 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1079216912/tls.crt::/tmp/serving-cert-1079216912/tls.key\\\\\\\"\\\\nI1205 12:25:38.771082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:25:38.774729 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:25:38.774756 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:25:38.774788 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:25:38.774811 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:25:38.780280 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:25:38.780316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780321 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:25:38.780329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:25:38.780332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:25:38.780335 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:25:38.780478 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:25:38.783020 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.375638 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a95e925e1cf25326e556267b9b43fcff4b12eafe8aeb03b577a01ed8f0d98ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.387846 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf86c4db02b1091f887fc7dc73c7e2e7bb8d03e7212fb8bd443a43c4f51e976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87cb87114139d85aeacf41902c2c61f7ccb3cef1f22892820a521390c2a64d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.398705 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.410587 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d237806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.417211 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.417237 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.417246 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.417261 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.417270 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:48Z","lastTransitionTime":"2025-12-05T12:25:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.421750 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:48Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.435883 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serv
iceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vpljs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:48Z is after 2025-08-24T17:21:41Z"
Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.520148 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.520182 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.520215 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.520233 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.520244 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:48Z","lastTransitionTime":"2025-12-05T12:25:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.623056 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.623097 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.623108 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.623125 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.623135 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:48Z","lastTransitionTime":"2025-12-05T12:25:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.726434 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.726523 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.726558 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.726596 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.726616 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:48Z","lastTransitionTime":"2025-12-05T12:25:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.829382 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.829441 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.829454 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.829474 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.829485 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:48Z","lastTransitionTime":"2025-12-05T12:25:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.932281 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.932339 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.932356 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.932376 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.932387 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:48Z","lastTransitionTime":"2025-12-05T12:25:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.998751 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 12:25:48 crc kubenswrapper[4784]: I1205 12:25:48.998780 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 12:25:48 crc kubenswrapper[4784]: E1205 12:25:48.999130 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 12:25:48 crc kubenswrapper[4784]: E1205 12:25:48.999309 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.035159 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.035257 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.035276 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.035306 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.035325 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:49Z","lastTransitionTime":"2025-12-05T12:25:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.165660 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.165712 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.165728 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.165766 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.165783 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:49Z","lastTransitionTime":"2025-12-05T12:25:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.259853 4784 generic.go:334] "Generic (PLEG): container finished" podID="58eb6bc9-be04-4bd0-a0a1-4021cfc2095b" containerID="92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64" exitCode=0
Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.259924 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" event={"ID":"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b","Type":"ContainerDied","Data":"92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64"}
Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.268373 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.268416 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.268431 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.268449 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.268462 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:49Z","lastTransitionTime":"2025-12-05T12:25:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.275811 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g5gv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759cb09f-42c3-4254-82f8-b5285b61012a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c44657f4f0a70fc2d2f1dcc08f1ca23c5fe94e77d3dbdd1bbb3f36f6471a62d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rpk8f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g5gv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:49Z is after 2025-08-24T17:21:41Z"
Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.298311 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"291f2a35-7dd5-4af9-87f0-caae4ef75c66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fxbpl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:49Z is after 2025-08-24T17:21:41Z"
Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.319298 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dzwxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f8364fb-1be8-4baa-aff0-10d4a4e8d614\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f0d98ad3b8357dc9296635ef627f67300f4bbdd8131491552ba43830d1e658\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2xfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dzwxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:49Z is after 2025-08-24T17:21:41Z"
Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.334230 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba1e93b1bb1f04a0c36b81190dc5ba99e09cff796f519b55340c78d2d6305a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:49Z is after 2025-08-24T17:21:41Z"
Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.348089 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:49Z is after 2025-08-24T17:21:41Z"
Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.360521 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a95e925e1cf25326e556267b9b43fcff4b12eafe8aeb03b577a01ed8f0d98ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:49Z is after 2025-08-24T17:21:41Z"
Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.371669 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.371711 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.371719 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.371735 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.371745 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:49Z","lastTransitionTime":"2025-12-05T12:25:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.372593 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xpw77" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"beba5d67-ad2e-4968-91da-3f451dd2cdc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52618f95fb8efa7ff3baa650dcf8f3373b7d17bc27138fa03e49c68792adf36c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjrpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xpw77\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:49Z is after 2025-08-24T17:21:41Z"
Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.386338 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be412f31-7a36-4811-8914-be8cdc987d08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76ddca8a6cc5af5e792bf0f50c2bc3cfcec5ea40705126799786373ad5d18e8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45f31312230c7d56492145f94ee5dfda841a79b7d35cf4139b4fc09eacf070fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sx8lm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:49Z is after 2025-08-24T17:21:41Z"
Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.403868 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 12:25:38.416897 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 12:25:38.417058 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:25:38.422141 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1079216912/tls.crt::/tmp/serving-cert-1079216912/tls.key\\\\\\\"\\\\nI1205 12:25:38.771082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:25:38.774729 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:25:38.774756 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:25:38.774788 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:25:38.774811 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:25:38.780280 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:25:38.780316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780321 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:25:38.780329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:25:38.780332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:25:38.780335 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:25:38.780478 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:25:38.783020 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:49Z is after 2025-08-24T17:21:41Z"
Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.418798 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:49Z is after 2025-08-24T17:21:41Z"
Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.433871 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf86c4db02b1091f887fc7dc73c7e2e7bb8d03e7212fb8bd443a43c4f51e976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87cb87114139d85aeacf41902c2c61f7ccb3cef1f22892820a521390c2a64d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:49Z is after 2025-08-24T17:21:41Z"
Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.451425 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:49Z is after 2025-08-24T17:21:41Z"
Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.464022 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d237806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:49Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.474414 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.474459 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.474485 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.474502 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.474514 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:49Z","lastTransitionTime":"2025-12-05T12:25:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.478174 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:
25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vpljs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:49Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.577812 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.577853 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.577861 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.577876 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.577885 4784 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:49Z","lastTransitionTime":"2025-12-05T12:25:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.680717 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.680782 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.680799 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.680820 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.680834 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:49Z","lastTransitionTime":"2025-12-05T12:25:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.783138 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.783181 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.783203 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.783219 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.783230 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:49Z","lastTransitionTime":"2025-12-05T12:25:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.885817 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.885903 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.885936 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.885958 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.885971 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:49Z","lastTransitionTime":"2025-12-05T12:25:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.989286 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.989351 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.989369 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.989396 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.989413 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:49Z","lastTransitionTime":"2025-12-05T12:25:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:49 crc kubenswrapper[4784]: I1205 12:25:49.998832 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:25:49 crc kubenswrapper[4784]: E1205 12:25:49.999089 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.092292 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.092333 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.092344 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.092368 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.092378 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:50Z","lastTransitionTime":"2025-12-05T12:25:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.195152 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.195228 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.195242 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.195276 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.195293 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:50Z","lastTransitionTime":"2025-12-05T12:25:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.267274 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" event={"ID":"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b","Type":"ContainerStarted","Data":"ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc"} Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.298245 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.298302 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.298315 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.298338 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.298352 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:50Z","lastTransitionTime":"2025-12-05T12:25:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.401556 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.401604 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.401619 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.401637 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.401648 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:50Z","lastTransitionTime":"2025-12-05T12:25:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.506352 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.506395 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.506407 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.506423 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.506434 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:50Z","lastTransitionTime":"2025-12-05T12:25:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.609625 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.609679 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.609699 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.609721 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.609737 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:50Z","lastTransitionTime":"2025-12-05T12:25:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.712373 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.712410 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.712418 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.712433 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.712443 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:50Z","lastTransitionTime":"2025-12-05T12:25:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.815622 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.815908 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.815919 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.815936 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.815948 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:50Z","lastTransitionTime":"2025-12-05T12:25:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.918711 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.918748 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.918758 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.918772 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.918781 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:50Z","lastTransitionTime":"2025-12-05T12:25:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.998236 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:25:50 crc kubenswrapper[4784]: I1205 12:25:50.998334 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:25:50 crc kubenswrapper[4784]: E1205 12:25:50.998453 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:25:50 crc kubenswrapper[4784]: E1205 12:25:50.998570 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.016937 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vpljs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:51Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.021135 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.021174 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.021207 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.021227 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.021240 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:51Z","lastTransitionTime":"2025-12-05T12:25:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.032791 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba1e93b1bb1f04a0c36b81190dc5ba99e09cff796f519b55340c78d2d6305a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:51Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.045279 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:51Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.057780 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g5gv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759cb09f-42c3-4254-82f8-b5285b61012a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c44657f4f0a70fc2d2f1dcc08f1ca23c5fe94e77d3dbdd1bbb3f36f6471a62d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cn
i/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rpk8f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g5gv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:51Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.082270 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"291f2a35-7dd5-4af9-87f0-caae4ef75c66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fxbpl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:51Z 
is after 2025-08-24T17:21:41Z" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.090835 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dzwxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f8364fb-1be8-4baa-aff0-10d4a4e8d614\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f0d98ad3b8357dc9296635ef627f67300f4bbdd8131491552ba43830d1e658\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2xfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dzwxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:51Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.102903 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 12:25:38.416897 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 12:25:38.417058 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:25:38.422141 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1079216912/tls.crt::/tmp/serving-cert-1079216912/tls.key\\\\\\\"\\\\nI1205 12:25:38.771082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:25:38.774729 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:25:38.774756 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:25:38.774788 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:25:38.774811 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:25:38.780280 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:25:38.780316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780321 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:25:38.780329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:25:38.780332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:25:38.780335 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:25:38.780478 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:25:38.783020 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:51Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.121438 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a95e925e1cf25326e556267b9b43fcff4b12eafe8aeb03b577a01ed8f0d98ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:51Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.123159 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.123209 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.123220 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.123233 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.123244 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:51Z","lastTransitionTime":"2025-12-05T12:25:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.132381 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xpw77" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"beba5d67-ad2e-4968-91da-3f451dd2cdc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52618f95fb8efa7ff3baa650dcf8f3373b7d17bc27138fa03e49c68792adf36c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjrpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xpw77\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:51Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.142812 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be412f31-7a36-4811-8914-be8cdc987d08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76ddca8a6cc5af5e792bf0f50c2bc3cfcec5ea40705126799786373ad5d18e8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45f31312230c7d56492145f94ee5dfda841a79b7d35cf4139b4fc09eacf070fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sx8lm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:51Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.155317 4784 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d23
7806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:51Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.167403 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:51Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.178667 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf86c4db02b1091f887fc7dc73c7e2e7bb8d03e7212fb8bd443a43c4f51e976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87cb87114139d85aeacf41902c2c61f7ccb3cef1f22892820a521390c2a64d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:51Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.189417 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:51Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.224924 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.224969 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.224986 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.225007 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.225035 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:51Z","lastTransitionTime":"2025-12-05T12:25:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.272923 4784 generic.go:334] "Generic (PLEG): container finished" podID="58eb6bc9-be04-4bd0-a0a1-4021cfc2095b" containerID="ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc" exitCode=0 Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.272994 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" event={"ID":"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b","Type":"ContainerDied","Data":"ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc"} Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.277351 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" event={"ID":"291f2a35-7dd5-4af9-87f0-caae4ef75c66","Type":"ContainerStarted","Data":"4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619"} Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.293452 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 12:25:38.416897 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 12:25:38.417058 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:25:38.422141 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1079216912/tls.crt::/tmp/serving-cert-1079216912/tls.key\\\\\\\"\\\\nI1205 12:25:38.771082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:25:38.774729 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:25:38.774756 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:25:38.774788 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:25:38.774811 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:25:38.780280 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:25:38.780316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780321 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:25:38.780329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:25:38.780332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:25:38.780335 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:25:38.780478 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:25:38.783020 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:51Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.306497 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a95e925e1cf25326e556267b9b43fcff4b12eafe8aeb03b577a01ed8f0d98ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:51Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.315388 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xpw77" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"beba5d67-ad2e-4968-91da-3f451dd2cdc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52618f95fb8efa7ff3baa650dcf8f3373b7d17bc27138fa03e49c68792adf36c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjrpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xpw77\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:51Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.327429 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.327469 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.327479 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.327493 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.327502 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:51Z","lastTransitionTime":"2025-12-05T12:25:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.331332 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be412f31-7a36-4811-8914-be8cdc987d08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76ddca8a6cc5af5e792bf0f50c2bc3cfcec5ea40705126799786373ad5d18e8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45f31312230c7d56492145f94ee5dfda841a79b7d35cf4139b4fc09eacf070fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sx8lm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:51Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.347135 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"sta
rtedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d237806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:51Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.360895 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:51Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.374817 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf86c4db02b1091f887fc7dc73c7e2e7bb8d03e7212fb8bd443a43c4f51e976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87cb87114139d85aeacf41902c2c61f7ccb3cef1f22892820a521390c2a64d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:51Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.390063 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:51Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.406454 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vpljs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:51Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.421659 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba1e93b1bb1f04a0c36b81190dc5ba99e09cff796f519b55340c78d2d6305a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:51Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.429996 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.430028 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.430036 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.430053 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.430063 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:51Z","lastTransitionTime":"2025-12-05T12:25:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.435080 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:51Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.448307 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g5gv5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"759cb09f-42c3-4254-82f8-b5285b61012a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c44657f4f0a70fc2d2f1dcc08f1ca23c5fe94e77d3dbdd1bbb3f36f6471a62d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rpk8f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g5gv5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:51Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.465839 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"291f2a35-7dd5-4af9-87f0-caae4ef75c66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fxbpl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:51Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.476283 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dzwxp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f8364fb-1be8-4baa-aff0-10d4a4e8d614\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f0d98ad3b8357dc9296635ef627f67300f4bbdd8131491552ba43830d1e658\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2xfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dzwxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:51Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.532820 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.532858 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.532871 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.532891 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.532903 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:51Z","lastTransitionTime":"2025-12-05T12:25:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.636877 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.636919 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.636928 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.636944 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.636953 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:51Z","lastTransitionTime":"2025-12-05T12:25:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.740355 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.740404 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.740416 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.740435 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.740449 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:51Z","lastTransitionTime":"2025-12-05T12:25:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.843135 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.843209 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.843223 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.843240 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.843252 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:51Z","lastTransitionTime":"2025-12-05T12:25:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.945619 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.945661 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.945672 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.945689 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.945698 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:51Z","lastTransitionTime":"2025-12-05T12:25:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:51 crc kubenswrapper[4784]: I1205 12:25:51.998313 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:25:51 crc kubenswrapper[4784]: E1205 12:25:51.998455 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.048127 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.048180 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.048224 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.048243 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.048255 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:52Z","lastTransitionTime":"2025-12-05T12:25:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.089612 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.089666 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.089683 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.089704 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.089720 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:52Z","lastTransitionTime":"2025-12-05T12:25:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:52 crc kubenswrapper[4784]: E1205 12:25:52.101294 4784 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"08035136-431a-41d0-879c-bf86d5af7e54\\\",\\\"systemUUID\\\":\\\"aac4a951-c40f-4b5f-a660-6c137757957c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:52Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.105908 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.106090 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.106170 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.106297 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.106411 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:52Z","lastTransitionTime":"2025-12-05T12:25:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:52 crc kubenswrapper[4784]: E1205 12:25:52.119752 4784 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"08035136-431a-41d0-879c-bf86d5af7e54\\\",\\\"systemUUID\\\":\\\"aac4a951-c40f-4b5f-a660-6c137757957c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:52Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.125629 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.125686 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.125699 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.125720 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.125732 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:52Z","lastTransitionTime":"2025-12-05T12:25:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:52 crc kubenswrapper[4784]: E1205 12:25:52.142152 4784 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"08035136-431a-41d0-879c-bf86d5af7e54\\\",\\\"systemUUID\\\":\\\"aac4a951-c40f-4b5f-a660-6c137757957c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:52Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.147654 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.147709 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.147723 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.147744 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.147757 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:52Z","lastTransitionTime":"2025-12-05T12:25:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:52 crc kubenswrapper[4784]: E1205 12:25:52.163986 4784 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"08035136-431a-41d0-879c-bf86d5af7e54\\\",\\\"systemUUID\\\":\\\"aac4a951-c40f-4b5f-a660-6c137757957c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:52Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.168482 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.168526 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.168538 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.168555 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.168567 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:52Z","lastTransitionTime":"2025-12-05T12:25:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:52 crc kubenswrapper[4784]: E1205 12:25:52.181255 4784 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"08035136-431a-41d0-879c-bf86d5af7e54\\\",\\\"systemUUID\\\":\\\"aac4a951-c40f-4b5f-a660-6c137757957c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:52Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:52 crc kubenswrapper[4784]: E1205 12:25:52.181452 4784 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.183320 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.183355 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.183366 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.183383 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.183394 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:52Z","lastTransitionTime":"2025-12-05T12:25:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.285326 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.285383 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.285396 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.285417 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.285435 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:52Z","lastTransitionTime":"2025-12-05T12:25:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.286158 4784 generic.go:334] "Generic (PLEG): container finished" podID="58eb6bc9-be04-4bd0-a0a1-4021cfc2095b" containerID="709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f" exitCode=0 Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.286215 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" event={"ID":"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b","Type":"ContainerDied","Data":"709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f"} Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.305442 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xpw77" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"beba5d67-ad2e-4968-91da-3f451dd2cdc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52618f95fb8efa7ff3baa650dcf8f3373b7d17bc27138fa03e49c68792adf36c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjrpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xpw77\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:52Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.322323 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be412f31-7a36-4811-8914-be8cdc987d08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76ddca8a6cc5af5e792bf0f50c2bc3cfcec5ea40705126799786373ad5d18e8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45f31312230c7d56492145f94ee5dfda841a79b7d35cf4139b4fc09eacf070fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sx8lm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:52Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.338411 4784 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 12:25:38.416897 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 12:25:38.417058 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:25:38.422141 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1079216912/tls.crt::/tmp/serving-cert-1079216912/tls.key\\\\\\\"\\\\nI1205 12:25:38.771082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:25:38.774729 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:25:38.774756 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:25:38.774788 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:25:38.774811 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:25:38.780280 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:25:38.780316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780321 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:25:38.780329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:25:38.780332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:25:38.780335 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:25:38.780478 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:25:38.783020 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:52Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.353652 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a95e925e1cf25326e556267b9b43fcff4b12eafe8aeb03b577a01ed8f0d98ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:52Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.366912 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf86c4db02b1091f887fc7dc73c7e2e7bb8d03e7212fb8bd443a43c4f51e976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87cb87114139d85aeacf41902c2c61f7ccb3cef1f22892820a521390c2a64d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:52Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.380808 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:52Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.388751 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.388791 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.388802 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.388819 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.388832 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:52Z","lastTransitionTime":"2025-12-05T12:25:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.393023 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d237806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:52Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.405991 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:52Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.422671 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vpljs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:52Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.438825 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"291f2a35-7dd5-4af9-87f0-caae4ef75c66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics 
northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"
host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fxbpl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T12:25:52Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.450516 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dzwxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f8364fb-1be8-4baa-aff0-10d4a4e8d614\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f0d98ad3b8357dc9296635ef627f67300f4bbdd8131491552ba43830d1e658\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2xfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dzwxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:52Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.462890 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba1e93b1bb1f04a0c36b81190dc5ba99e09cff796f519b55340c78d2d6305a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:52Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.475001 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:52Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.485550 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g5gv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759cb09f-42c3-4254-82f8-b5285b61012a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c44657f4f0a70fc2d2f1dcc08f1ca23c5fe94e77d3dbdd1bbb3f36f6471a62d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\
\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rpk8f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g5gv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:52Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.491471 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.491511 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.491519 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.491536 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.491550 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:52Z","lastTransitionTime":"2025-12-05T12:25:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.594241 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.594272 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.594280 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.594292 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.594302 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:52Z","lastTransitionTime":"2025-12-05T12:25:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.696294 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.696336 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.696347 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.696363 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.696374 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:52Z","lastTransitionTime":"2025-12-05T12:25:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.799284 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.799558 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.799851 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.800011 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.800148 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:52Z","lastTransitionTime":"2025-12-05T12:25:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.902460 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.902911 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.902921 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.902937 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.902946 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:52Z","lastTransitionTime":"2025-12-05T12:25:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.998103 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 12:25:52 crc kubenswrapper[4784]: I1205 12:25:52.998178 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 12:25:52 crc kubenswrapper[4784]: E1205 12:25:52.998277 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 12:25:52 crc kubenswrapper[4784]: E1205 12:25:52.998401 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.007026 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.007060 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.007069 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.007085 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.007098 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:53Z","lastTransitionTime":"2025-12-05T12:25:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.108753 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.108786 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.108821 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.108834 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.108847 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:53Z","lastTransitionTime":"2025-12-05T12:25:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.211634 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.211675 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.211685 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.211701 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.211710 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:53Z","lastTransitionTime":"2025-12-05T12:25:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.293405 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" event={"ID":"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b","Type":"ContainerStarted","Data":"22b4aeb18e84f1b440528e547960f9aed0bf44e53a870af6b06bc85f754e1c04"}
Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.298521 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" event={"ID":"291f2a35-7dd5-4af9-87f0-caae4ef75c66","Type":"ContainerStarted","Data":"30b9ff9d210d13ad1e8b18d2ac807697426b39ffe6bdd405ed8940014afc91d3"}
Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.298791 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl"
Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.313869 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d237806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:53Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.314511 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.314546 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.314564 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.314581 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.314592 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:53Z","lastTransitionTime":"2025-12-05T12:25:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.318057 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.326931 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:53Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.339129 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf86c4db02b1091f887fc7dc73c7e2e7bb8d03e7212fb8bd443a43c4f51e976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87cb87114139d85aeacf41902c2c61f7ccb3cef1f22892820a521390c2a64d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:53Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.351759 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:53Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.364815 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b4aeb18e84f1b440528e547960f9aed0bf44e53a870af6b06bc85f754e1c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vpljs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:53Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.378940 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba1e93b1bb1f04a0c36b81190dc5ba99e09cff796f519b55340c78d2d6305a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:53Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.392858 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:53Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.408827 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g5gv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759cb09f-42c3-4254-82f8-b5285b61012a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c44657f4f0a70fc2d2f1dcc08f1ca23c5fe94e77d3dbdd1bbb3f36f6471a62d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\
\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rpk8f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g5gv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:53Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.417176 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.417242 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.417285 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.417304 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.417318 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:53Z","lastTransitionTime":"2025-12-05T12:25:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.433937 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"291f2a35-7dd5-4af9-87f0-caae4ef75c66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8
e2465626a2804a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fxbpl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:53Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.446155 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dzwxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f8364fb-1be8-4baa-aff0-10d4a4e8d614\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f0d98ad3b8357dc9296635ef627f67300f4bbdd8131491552ba43830d1e658\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2xfq\\\",\\\"readOnly\\\":t
rue,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dzwxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:53Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.459966 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:2
5:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 12:25:38.416897 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 12:25:38.417058 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:25:38.422141 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1079216912/tls.crt::/tmp/serving-cert-1079216912/tls.key\\\\\\\"\\\\nI1205 12:25:38.771082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:25:38.774729 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:25:38.774756 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:25:38.774788 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:25:38.774811 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:25:38.780280 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:25:38.780316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780321 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:25:38.780329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:25:38.780332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:25:38.780335 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:25:38.780478 1 
genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:25:38.783020 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:53Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.471861 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a95e925e1cf25326e556267b9b43fcff4b12eafe8aeb03b577a01ed8f0d98ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:53Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.484152 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xpw77" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"beba5d67-ad2e-4968-91da-3f451dd2cdc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52618f95fb8efa7ff3baa650dcf8f3373b7d17bc27138fa03e49c68792adf36c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjrpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xpw77\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:53Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.496664 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be412f31-7a36-4811-8914-be8cdc987d08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76ddca8a6cc5af5e792bf0f50c2bc3cfcec5ea40705126799786373ad5d18e8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45f31312230c7d56492145f94ee5dfda841a79b7d35cf4139b4fc09eacf070fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sx8lm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:53Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.508454 4784 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-dns/node-resolver-xpw77" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"beba5d67-ad2e-4968-91da-3f451dd2cdc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52618f95fb8efa7ff3baa650dcf8f3373b7d17bc27138fa03e49c68792adf36c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjrpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xpw77\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:53Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.519343 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.519387 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.519396 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.519412 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.519421 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:53Z","lastTransitionTime":"2025-12-05T12:25:53Z","reason":"KubeletNotReady","message":"container runtime network not 
ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.524231 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be412f31-7a36-4811-8914-be8cdc987d08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76ddca8a6cc5af5e792bf0f50c2bc3cfcec5ea40705126799786373ad5d18e8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45f31312230c7d56492145f94ee5dfda841a79b7d35cf4139b4fc09eacf070fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sx8lm\": Internal 
error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:53Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.540512 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e635
5e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 12:25:38.416897 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 12:25:38.417058 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:25:38.422141 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1079216912/tls.crt::/tmp/serving-cert-1079216912/tls.key\\\\\\\"\\\\nI1205 12:25:38.771082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:25:38.774729 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:25:38.774756 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:25:38.774788 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:25:38.774811 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:25:38.780280 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:25:38.780316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780321 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:25:38.780329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:25:38.780332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:25:38.780335 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:25:38.780478 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:25:38.783020 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:53Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.554500 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a95e925e1cf25326e556267b9b43fcff4b12eafe8aeb03b577a01ed8f0d98ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:53Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.566291 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf86c4db02b1091f887fc7dc73c7e2e7bb8d03e7212fb8bd443a43c4f51e976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87cb87114139d85aeacf41902c2c61f7ccb3cef1f22892820a521390c2a64d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:53Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.579122 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:53Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.591243 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d237806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:53Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.603397 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:53Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.622302 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.622355 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.622365 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.622380 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.622392 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:53Z","lastTransitionTime":"2025-12-05T12:25:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.623041 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b4aeb18e84f1b440528e547960f9aed0bf44e53a870af6b06bc85f754e1c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vpljs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:53Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.649059 4784 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"291f2a35-7dd5-4af9-87f0-caae4ef75c66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha2
56:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":
\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30b9ff9d210d13ad1e8b18d2ac807697426b39ffe6bdd405ed8940014afc91d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"nam
e\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fxbpl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:53Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.659958 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dzwxp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f8364fb-1be8-4baa-aff0-10d4a4e8d614\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f0d98ad3b8357dc9296635ef627f67300f4bbdd8131491552ba43830d1e658\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2xfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dzwxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:53Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.676155 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba1e93b1bb1f04a0c36b81190dc5ba99e09cff796f519b55340c78d2d6305a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:53Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.688272 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:53Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.701681 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g5gv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759cb09f-42c3-4254-82f8-b5285b61012a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c44657f4f0a70fc2d2f1dcc08f1ca23c5fe94e77d3dbdd1bbb3f36f6471a62d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\
\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rpk8f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g5gv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:53Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.724853 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.724902 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.724910 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.724926 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.724936 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:53Z","lastTransitionTime":"2025-12-05T12:25:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.828263 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.828317 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.828330 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.828347 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.828358 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:53Z","lastTransitionTime":"2025-12-05T12:25:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.930813 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.930890 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.930913 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.930943 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.930960 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:53Z","lastTransitionTime":"2025-12-05T12:25:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:53 crc kubenswrapper[4784]: I1205 12:25:53.998266 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:25:53 crc kubenswrapper[4784]: E1205 12:25:53.998425 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.034167 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.034216 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.034225 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.034241 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.034251 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:54Z","lastTransitionTime":"2025-12-05T12:25:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.137942 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.137993 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.138003 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.138028 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.138040 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:54Z","lastTransitionTime":"2025-12-05T12:25:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.241787 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.241871 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.241890 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.241929 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.241953 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:54Z","lastTransitionTime":"2025-12-05T12:25:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.303490 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.303560 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.325415 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.341043 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:54Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.345711 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.345775 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.345788 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.345813 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.345828 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:54Z","lastTransitionTime":"2025-12-05T12:25:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.356812 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d237806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:54Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.372449 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:54Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.386548 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf86c4db02b1091f887fc7dc73c7e2e7bb8d03e7212fb8bd443a43c4f51e976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87cb87114139d85aeacf41902c2c61f7ccb3cef1f22892820a521390c2a64d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:54Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.401539 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b4aeb18e84f1b440528e547960f9aed0bf44e53a870af6b06bc85f754e1c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e3
3e5030664395e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"image\\\":\\\"quay.io/ope
nshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vpljs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:54Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.414974 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dzwxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f8364fb-1be8-4baa-aff0-10d4a4e8d614\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f0d98ad3b8357dc9296635ef627f67300f4bbdd8131491552ba43830d1e658\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2xfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dzwxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:54Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.433696 4784 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba1e93b1bb1f04a0c36b81190dc5ba99e09cff796f519b55340c78d2d6305a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:54Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.448680 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.448738 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.448751 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.448776 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.448791 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:54Z","lastTransitionTime":"2025-12-05T12:25:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.448894 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:54Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.462240 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g5gv5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"759cb09f-42c3-4254-82f8-b5285b61012a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c44657f4f0a70fc2d2f1dcc08f1ca23c5fe94e77d3dbdd1bbb3f36f6471a62d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rpk8f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g5gv5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:54Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.485359 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"291f2a35-7dd5-4af9-87f0-caae4ef75c66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c86ad
4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30b9ff9d210d13ad1e8b18d2ac807697426b39ffe6bdd405ed8940014afc91d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\
\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fxbpl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:54Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.502528 4784 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be412f31-7a36-4811-8914-be8cdc987d08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76ddca8a6cc5af5e792bf0f50c2bc3cfcec5ea40705126799786373ad5d18e8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45f31312230c7d56492145f94ee5dfda841a79b7d35cf4139b4fc09eacf070fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sx8lm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:54Z is after 2025-08-24T17:21:41Z" Dec 05 
12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.517970 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\
":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 12:25:38.416897 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 12:25:38.417058 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:25:38.422141 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1079216912/tls.crt::/tmp/serving-cert-1079216912/tls.key\\\\\\\"\\\\nI1205 12:25:38.771082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:25:38.774729 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:25:38.774756 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:25:38.774788 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:25:38.774811 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:25:38.780280 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:25:38.780316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780321 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:25:38.780329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:25:38.780332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:25:38.780335 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:25:38.780478 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:25:38.783020 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:54Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.536342 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a95e925e1cf25326e556267b9b43fcff4b12eafe8aeb03b577a01ed8f0d98ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:54Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.548540 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xpw77" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"beba5d67-ad2e-4968-91da-3f451dd2cdc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52618f95fb8efa7ff3baa650dcf8f3373b7d17bc27138fa03e49c68792adf36c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjrpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xpw77\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:54Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.551279 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.551314 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.551327 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.551345 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.551358 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:54Z","lastTransitionTime":"2025-12-05T12:25:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.656838 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.656889 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.656899 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.656918 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.656929 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:54Z","lastTransitionTime":"2025-12-05T12:25:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.721686 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 12:25:54 crc kubenswrapper[4784]: E1205 12:25:54.721934 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:26:10.721912949 +0000 UTC m=+50.141979764 (durationBeforeRetry 16s).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.782416 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.782481 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.782501 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.782516 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.782527 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:54Z","lastTransitionTime":"2025-12-05T12:25:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.822535 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.822584 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.822608 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.822627 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 12:25:54 crc kubenswrapper[4784]: E1205 12:25:54.822685 4784 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object
"openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 12:25:54 crc kubenswrapper[4784]: E1205 12:25:54.822714 4784 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 12:25:54 crc kubenswrapper[4784]: E1205 12:25:54.822726 4784 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:25:54 crc kubenswrapper[4784]: E1205 12:25:54.822733 4784 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 12:25:54 crc kubenswrapper[4784]: E1205 12:25:54.822747 4784 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 12:25:54 crc kubenswrapper[4784]: E1205 12:25:54.822758 4784 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:25:54 crc kubenswrapper[4784]: E1205 12:25:54.822781 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 12:26:10.822765624 +0000 UTC m=+50.242832439 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:25:54 crc kubenswrapper[4784]: E1205 12:25:54.822799 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 12:26:10.822792695 +0000 UTC m=+50.242859510 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 05 12:25:54 crc kubenswrapper[4784]: E1205 12:25:54.822814 4784 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Dec 05 12:25:54 crc kubenswrapper[4784]: E1205 12:25:54.822841 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 12:26:10.822828666 +0000 UTC m=+50.242895481 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Dec 05 12:25:54 crc kubenswrapper[4784]: E1205 12:25:54.822882 4784 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 05 12:25:54 crc kubenswrapper[4784]: E1205 12:25:54.822900 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 12:26:10.822894918 +0000 UTC m=+50.242961733 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.885167 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.885228 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.885240 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.885257 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.885270 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:54Z","lastTransitionTime":"2025-12-05T12:25:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.987971 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.987998 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.988005 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.988018 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:54 crc kubenswrapper[4784]: I1205 12:25:54.988027 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:54Z","lastTransitionTime":"2025-12-05T12:25:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.002171 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:25:55 crc kubenswrapper[4784]: E1205 12:25:55.002305 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.002592 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:25:55 crc kubenswrapper[4784]: E1205 12:25:55.002638 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.090856 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.090888 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.090897 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.090911 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.090920 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:55Z","lastTransitionTime":"2025-12-05T12:25:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.193399 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.193436 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.193446 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.193460 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.193469 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:55Z","lastTransitionTime":"2025-12-05T12:25:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.295597 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.295910 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.295980 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.296056 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.296129 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:55Z","lastTransitionTime":"2025-12-05T12:25:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.385480 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.398683 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.398739 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.398750 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.398764 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.398774 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:55Z","lastTransitionTime":"2025-12-05T12:25:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.402757 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:55Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.417607 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf86c4db02b1091f887fc7dc73c7e2e7bb8d03e7212fb8bd443a43c4f51e976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87cb87114139d85aeacf41902c2c61f7ccb3cef1f22892820a521390c2a64d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:55Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.431489 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:55Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.445273 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d237806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:55Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.459986 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b4aeb18e84f1b440528e547960f9aed0bf44e53a870af6b06bc85f754e1c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33
e5030664395e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-b
inary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termin
ated\\\":{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vpljs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:55Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.472865 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g5gv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759cb09f-42c3-4254-82f8-b5285b61012a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c44657f4f0a70fc2d2f1dcc08f1ca23c5fe94e77d3dbdd1bbb3f36f6471a62d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\
\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rpk8f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g5gv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:55Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.492228 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"291f2a35-7dd5-4af9-87f0-caae4ef75c66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30b9ff9d210d13ad1e8b18d2ac807697426b39ff
e6bdd405ed8940014afc91d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fxbpl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:55Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.500783 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.500988 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.501218 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.501348 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.501481 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:55Z","lastTransitionTime":"2025-12-05T12:25:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.503327 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dzwxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f8364fb-1be8-4baa-aff0-10d4a4e8d614\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f0d98ad3b8357dc9296635ef627f67300f4bbdd8131491552ba43830d1e658\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2xfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dzwxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:55Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.515666 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba1e93b1bb1f04a0c36b81190dc5ba99e09cff796f519b55340c78d2d6305a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:55Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.528765 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:55Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.540045 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a95e925e1cf25326e556267b9b43fcff4b12eafe8aeb03b577a01ed8f0d98ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:55Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.549219 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xpw77" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"beba5d67-ad2e-4968-91da-3f451dd2cdc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52618f95fb8efa7ff3baa650dcf8f3373b7d17bc27138fa03e49c68792adf36c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjrpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xpw77\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:55Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.560878 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be412f31-7a36-4811-8914-be8cdc987d08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76ddca8a6cc5af5e792bf0f50c2bc3cfcec5ea40705126799786373ad5d18e8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45f31312230c7d56492145f94ee5dfda841a79b7d35cf4139b4fc09eacf070fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sx8lm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:55Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.575016 4784 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1315abec8017870aa470f7fc9919d3ac9722ab2735
67e0e12baa8405ede002e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 12:25:38.416897 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 12:25:38.417058 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:25:38.422141 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1079216912/tls.crt::/tmp/serving-cert-1079216912/tls.key\\\\\\\"\\\\nI1205 12:25:38.771082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:25:38.774729 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:25:38.774756 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:25:38.774788 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:25:38.774811 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:25:38.780280 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:25:38.780316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780321 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:25:38.780329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:25:38.780332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:25:38.780335 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:25:38.780478 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:25:38.783020 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:55Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.603999 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.604032 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.604046 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.604059 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.604067 4784 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:55Z","lastTransitionTime":"2025-12-05T12:25:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.705733 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.706002 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.706107 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.706212 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.706343 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:55Z","lastTransitionTime":"2025-12-05T12:25:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.808803 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.808846 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.808854 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.808868 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.808877 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:55Z","lastTransitionTime":"2025-12-05T12:25:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.910602 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.910644 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.910653 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.910669 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.910680 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:55Z","lastTransitionTime":"2025-12-05T12:25:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:55 crc kubenswrapper[4784]: I1205 12:25:55.998246 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:25:55 crc kubenswrapper[4784]: E1205 12:25:55.998392 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.013564 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.013620 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.013633 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.013650 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.013663 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:56Z","lastTransitionTime":"2025-12-05T12:25:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.116242 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.116276 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.116286 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.116300 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.116310 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:56Z","lastTransitionTime":"2025-12-05T12:25:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.218344 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.218402 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.218412 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.218426 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.218435 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:56Z","lastTransitionTime":"2025-12-05T12:25:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.320907 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.320955 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.320966 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.320984 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.320996 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:56Z","lastTransitionTime":"2025-12-05T12:25:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.423280 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.423324 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.423333 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.423348 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.423358 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:56Z","lastTransitionTime":"2025-12-05T12:25:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.526069 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.526115 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.526129 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.526148 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.526159 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:56Z","lastTransitionTime":"2025-12-05T12:25:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.628814 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.628846 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.628854 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.628866 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.628875 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:56Z","lastTransitionTime":"2025-12-05T12:25:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.731073 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.731108 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.731117 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.731132 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.731141 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:56Z","lastTransitionTime":"2025-12-05T12:25:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.834219 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.834292 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.834308 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.834328 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.834342 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:56Z","lastTransitionTime":"2025-12-05T12:25:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.936578 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.936617 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.936628 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.936860 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.936881 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:56Z","lastTransitionTime":"2025-12-05T12:25:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.998642 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:25:56 crc kubenswrapper[4784]: E1205 12:25:56.998785 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:25:56 crc kubenswrapper[4784]: I1205 12:25:56.998893 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:25:56 crc kubenswrapper[4784]: E1205 12:25:56.999045 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.039261 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.039298 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.039315 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.039331 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.039345 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:57Z","lastTransitionTime":"2025-12-05T12:25:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.142641 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.142691 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.142699 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.142715 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.142725 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:57Z","lastTransitionTime":"2025-12-05T12:25:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.245332 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.245379 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.245390 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.245407 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.245419 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:57Z","lastTransitionTime":"2025-12-05T12:25:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.314666 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fxbpl_291f2a35-7dd5-4af9-87f0-caae4ef75c66/ovnkube-controller/0.log"
Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.317415 4784 generic.go:334] "Generic (PLEG): container finished" podID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerID="30b9ff9d210d13ad1e8b18d2ac807697426b39ffe6bdd405ed8940014afc91d3" exitCode=1
Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.317456 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" event={"ID":"291f2a35-7dd5-4af9-87f0-caae4ef75c66","Type":"ContainerDied","Data":"30b9ff9d210d13ad1e8b18d2ac807697426b39ffe6bdd405ed8940014afc91d3"}
Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.318031 4784 scope.go:117] "RemoveContainer" containerID="30b9ff9d210d13ad1e8b18d2ac807697426b39ffe6bdd405ed8940014afc91d3"
Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.332878 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\
\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d237806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:57Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.344774 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with 
unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:57Z is after 2025-08-24T17:21:41Z"
Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.347719 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.347764 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.347773 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.347789 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.347798 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:57Z","lastTransitionTime":"2025-12-05T12:25:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.356904 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf86c4db02b1091f887fc7dc73c7e2e7bb8d03e7212fb8bd443a43c4f51e976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87cb87114139d85aeacf41902c2c61f7ccb3cef1f22892820a521390c2a64d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:57Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.368424 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:57Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.380937 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b4aeb18e84f1b440528e547960f9aed0bf44e53a870af6b06bc85f754e1c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vpljs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:57Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.392503 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba1e93b1bb1f04a0c36b81190dc5ba99e09cff796f519b55340c78d2d6305a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:57Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.404293 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:57Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.415650 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g5gv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759cb09f-42c3-4254-82f8-b5285b61012a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c44657f4f0a70fc2d2f1dcc08f1ca23c5fe94e77d3dbdd1bbb3f36f6471a62d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\
\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rpk8f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g5gv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:57Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.436076 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"291f2a35-7dd5-4af9-87f0-caae4ef75c66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30b9ff9d210d13ad1e8b18d2ac807697426b39ff
e6bdd405ed8940014afc91d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30b9ff9d210d13ad1e8b18d2ac807697426b39ffe6bdd405ed8940014afc91d3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:25:56Z\\\",\\\"message\\\":\\\"qos/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 12:25:55.986142 6090 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 12:25:55.986220 6090 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 12:25:55.986226 6090 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 12:25:55.986284 6090 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 12:25:55.986288 6090 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 12:25:55.986371 6090 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 12:25:55.986371 6090 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 12:25:55.986390 6090 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 12:25:55.986404 6090 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 12:25:55.986409 6090 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 12:25:55.986422 6090 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 12:25:55.986454 6090 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 12:25:55.986453 6090 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 12:25:55.986465 6090 factory.go:656] Stopping watch factory\\\\nI1205 12:25:55.986478 6090 handler.go:208] Removed *v1.Node 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fxbpl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:57Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.449556 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dzwxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f8364fb-1be8-4baa-aff0-10d4a4e8d614\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f0d98ad3b8357dc9296635ef627f67300f4bbdd8131491552ba43830d1e658\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2xfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.12
6.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dzwxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:57Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.450322 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.450352 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.450360 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.450373 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.450382 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:57Z","lastTransitionTime":"2025-12-05T12:25:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.465175 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 12:25:38.416897 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 12:25:38.417058 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:25:38.422141 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1079216912/tls.crt::/tmp/serving-cert-1079216912/tls.key\\\\\\\"\\\\nI1205 12:25:38.771082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:25:38.774729 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:25:38.774756 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:25:38.774788 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:25:38.774811 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:25:38.780280 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:25:38.780316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780321 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:25:38.780329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:25:38.780332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:25:38.780335 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:25:38.780478 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:25:38.783020 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:57Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.478157 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a95e925e1cf25326e556267b9b43fcff4b12eafe8aeb03b577a01ed8f0d98ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:57Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.490537 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xpw77" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"beba5d67-ad2e-4968-91da-3f451dd2cdc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52618f95fb8efa7ff3baa650dcf8f3373b7d17bc27138fa03e49c68792adf36c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjrpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xpw77\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:57Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.502554 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be412f31-7a36-4811-8914-be8cdc987d08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76ddca8a6cc5af5e792bf0f50c2bc3cfcec5ea40705126799786373ad5d18e8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45f31312230c7d56492145f94ee5dfda841a79b7d35cf4139b4fc09eacf070fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sx8lm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:57Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.552171 4784 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.552238 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.552253 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.552273 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.552284 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:57Z","lastTransitionTime":"2025-12-05T12:25:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.586839 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb"] Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.587342 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.590101 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.592037 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.601439 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dzwxp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f8364fb-1be8-4baa-aff0-10d4a4e8d614\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f0d98ad3b8357dc9296635ef627f67300f4bbdd8131491552ba43830d1e658\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2xfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dzwxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:57Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.625423 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba1e93b1bb1f04a0c36b81190dc5ba99e09cff796f519b55340c78d2d6305a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:57Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.640852 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:57Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.651517 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0851219b-f3dd-4229-b5c5-b4d86a452bfd-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-pzsqb\" (UID: \"0851219b-f3dd-4229-b5c5-b4d86a452bfd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.651573 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0851219b-f3dd-4229-b5c5-b4d86a452bfd-env-overrides\") pod \"ovnkube-control-plane-749d76644c-pzsqb\" (UID: \"0851219b-f3dd-4229-b5c5-b4d86a452bfd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.651590 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0851219b-f3dd-4229-b5c5-b4d86a452bfd-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-pzsqb\" (UID: \"0851219b-f3dd-4229-b5c5-b4d86a452bfd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.651700 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7t7rf\" (UniqueName: \"kubernetes.io/projected/0851219b-f3dd-4229-b5c5-b4d86a452bfd-kube-api-access-7t7rf\") pod \"ovnkube-control-plane-749d76644c-pzsqb\" (UID: \"0851219b-f3dd-4229-b5c5-b4d86a452bfd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.654927 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.654956 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.654967 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.654983 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.654996 4784 setters.go:603] "Node became not ready" 
node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:57Z","lastTransitionTime":"2025-12-05T12:25:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.659087 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g5gv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759cb09f-42c3-4254-82f8-b5285b61012a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c44657f4f0a70fc2d2f1dcc08f1ca23c5fe94e77d3dbdd1bbb3f36f6471a62d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\
\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rpk8f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g5gv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:57Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.686169 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"291f2a35-7dd5-4af9-87f0-caae4ef75c66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30b9ff9d210d13ad1e8b18d2ac807697426b39ff
e6bdd405ed8940014afc91d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30b9ff9d210d13ad1e8b18d2ac807697426b39ffe6bdd405ed8940014afc91d3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:25:56Z\\\",\\\"message\\\":\\\"qos/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 12:25:55.986142 6090 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 12:25:55.986220 6090 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 12:25:55.986226 6090 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 12:25:55.986284 6090 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 12:25:55.986288 6090 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 12:25:55.986371 6090 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 12:25:55.986371 6090 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 12:25:55.986390 6090 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 12:25:55.986404 6090 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 12:25:55.986409 6090 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 12:25:55.986422 6090 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 12:25:55.986454 6090 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 12:25:55.986453 6090 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 12:25:55.986465 6090 factory.go:656] Stopping watch factory\\\\nI1205 12:25:55.986478 6090 handler.go:208] Removed *v1.Node 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fxbpl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:57Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.709839 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be412f31-7a36-4811-8914-be8cdc987d08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76ddca8a6cc5af5e792bf0f50c2bc3cfcec5ea40705126799786373ad5d18e8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\"
:\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45f31312230c7d56492145f94ee5dfda841a79b7d35cf4139b4fc09eacf070fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sx8lm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:57Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.726926 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec696
3791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 12:25:38.416897 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 12:25:38.417058 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:25:38.422141 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1079216912/tls.crt::/tmp/serving-cert-1079216912/tls.key\\\\\\\"\\\\nI1205 12:25:38.771082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:25:38.774729 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:25:38.774756 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:25:38.774788 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:25:38.774811 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:25:38.780280 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:25:38.780316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780321 1 
secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:25:38.780329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:25:38.780332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:25:38.780335 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:25:38.780478 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:25:38.783020 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:57Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.739417 4784 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a95e925e1cf25326e556267b9b43fcff4b12eafe8aeb03b577a01ed8f0d98ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:57Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.749420 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xpw77" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"beba5d67-ad2e-4968-91da-3f451dd2cdc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52618f95fb8efa7ff3baa650dcf8f3373b7d17bc27138fa03e49c68792adf36c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjrpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xpw77\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:57Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.752701 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0851219b-f3dd-4229-b5c5-b4d86a452bfd-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-pzsqb\" (UID: \"0851219b-f3dd-4229-b5c5-b4d86a452bfd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.752754 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0851219b-f3dd-4229-b5c5-b4d86a452bfd-env-overrides\") pod \"ovnkube-control-plane-749d76644c-pzsqb\" (UID: \"0851219b-f3dd-4229-b5c5-b4d86a452bfd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.752772 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: 
\"kubernetes.io/secret/0851219b-f3dd-4229-b5c5-b4d86a452bfd-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-pzsqb\" (UID: \"0851219b-f3dd-4229-b5c5-b4d86a452bfd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.752801 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7t7rf\" (UniqueName: \"kubernetes.io/projected/0851219b-f3dd-4229-b5c5-b4d86a452bfd-kube-api-access-7t7rf\") pod \"ovnkube-control-plane-749d76644c-pzsqb\" (UID: \"0851219b-f3dd-4229-b5c5-b4d86a452bfd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.753507 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0851219b-f3dd-4229-b5c5-b4d86a452bfd-env-overrides\") pod \"ovnkube-control-plane-749d76644c-pzsqb\" (UID: \"0851219b-f3dd-4229-b5c5-b4d86a452bfd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.753636 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0851219b-f3dd-4229-b5c5-b4d86a452bfd-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-pzsqb\" (UID: \"0851219b-f3dd-4229-b5c5-b4d86a452bfd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.756713 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.756753 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.756762 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.756779 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.756789 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:57Z","lastTransitionTime":"2025-12-05T12:25:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.760582 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0851219b-f3dd-4229-b5c5-b4d86a452bfd-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-pzsqb\" (UID: \"0851219b-f3dd-4229-b5c5-b4d86a452bfd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.766465 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:57Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.771804 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7t7rf\" (UniqueName: \"kubernetes.io/projected/0851219b-f3dd-4229-b5c5-b4d86a452bfd-kube-api-access-7t7rf\") pod \"ovnkube-control-plane-749d76644c-pzsqb\" (UID: \"0851219b-f3dd-4229-b5c5-b4d86a452bfd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.780781 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0851219b-f3dd-4229-b5c5-b4d86a452bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7t7rf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7t7rf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pzsqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:57Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.792731 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d237806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:57Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.805053 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:57Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.818816 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf86c4db02b1091f887fc7dc73c7e2e7bb8d03e7212fb8bd443a43c4f51e976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87cb87114139d85aeacf41902c2c61f7ccb3cef1f22892820a521390c2a64d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:57Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.834525 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b4aeb18e84f1b440528e547960f9aed0bf44e53a870af6b06bc85f754e1c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e3
3e5030664395e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"image\\\":\\\"quay.io/ope
nshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vpljs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:57Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.859072 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.859112 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.859123 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.859140 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.859152 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:57Z","lastTransitionTime":"2025-12-05T12:25:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.900586 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb" Dec 05 12:25:57 crc kubenswrapper[4784]: W1205 12:25:57.911708 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0851219b_f3dd_4229_b5c5_b4d86a452bfd.slice/crio-585efa3ae5bb086cf1c0812b56608167d3189c5ba4d48c2099c3bbaf49d6455f WatchSource:0}: Error finding container 585efa3ae5bb086cf1c0812b56608167d3189c5ba4d48c2099c3bbaf49d6455f: Status 404 returned error can't find the container with id 585efa3ae5bb086cf1c0812b56608167d3189c5ba4d48c2099c3bbaf49d6455f Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.960676 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.960913 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.960973 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.961031 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:57 crc kubenswrapper[4784]: I1205 12:25:57.961093 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:57Z","lastTransitionTime":"2025-12-05T12:25:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.011388 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:25:58 crc kubenswrapper[4784]: E1205 12:25:58.011742 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.063712 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.063746 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.063754 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.063768 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.063778 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:58Z","lastTransitionTime":"2025-12-05T12:25:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.166254 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.166307 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.166322 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.166340 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.166352 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:58Z","lastTransitionTime":"2025-12-05T12:25:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.268758 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.268795 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.268805 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.268819 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.268829 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:58Z","lastTransitionTime":"2025-12-05T12:25:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.321993 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb" event={"ID":"0851219b-f3dd-4229-b5c5-b4d86a452bfd","Type":"ContainerStarted","Data":"585efa3ae5bb086cf1c0812b56608167d3189c5ba4d48c2099c3bbaf49d6455f"} Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.324030 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fxbpl_291f2a35-7dd5-4af9-87f0-caae4ef75c66/ovnkube-controller/0.log" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.328366 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" event={"ID":"291f2a35-7dd5-4af9-87f0-caae4ef75c66","Type":"ContainerStarted","Data":"96e440d10f16055ce61279bed179cd6e0517947ef2844bc3c2f1c81e98055bae"} Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.329378 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.340590 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0851219b-f3dd-4229-b5c5-b4d86a452bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7t7rf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7t7rf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pzsqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.365325 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d237806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.371482 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.371520 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.371532 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.371548 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.371560 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:58Z","lastTransitionTime":"2025-12-05T12:25:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.377180 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.389478 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf86c4db02b1091f887fc7dc73c7e2e7bb8d03e7212fb8bd443a43c4f51e976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87cb87114139d85aeacf41902c2c61f7ccb3cef1f22892820a521390c2a64d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.401076 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.414790 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b4aeb18e84f1b440528e547960f9aed0bf44e53a870af6b06bc85f754e1c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vpljs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.427751 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba1e93b1bb1f04a0c36b81190dc5ba99e09cff796f519b55340c78d2d6305a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.439576 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.452761 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g5gv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759cb09f-42c3-4254-82f8-b5285b61012a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c44657f4f0a70fc2d2f1dcc08f1ca23c5fe94e77d3dbdd1bbb3f36f6471a62d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\
\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rpk8f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g5gv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.473979 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.474011 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.474020 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.474052 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.474062 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:58Z","lastTransitionTime":"2025-12-05T12:25:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.475424 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"291f2a35-7dd5-4af9-87f0-caae4ef75c66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e440d10f16055ce61279bed179cd6e0517947ef2844bc3c2f1c81e98055bae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30b9ff9d210d13ad1e8b18d2ac807697426b39ffe6bdd405ed8940014afc91d3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:25:56Z\\\",\\\"message\\\":\\\"qos/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 12:25:55.986142 6090 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 12:25:55.986220 6090 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 12:25:55.986226 6090 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 12:25:55.986284 6090 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 12:25:55.986288 6090 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 12:25:55.986371 6090 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 12:25:55.986371 6090 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 12:25:55.986390 6090 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 12:25:55.986404 6090 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 12:25:55.986409 6090 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 12:25:55.986422 6090 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 12:25:55.986454 6090 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 12:25:55.986453 6090 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 12:25:55.986465 6090 factory.go:656] Stopping watch factory\\\\nI1205 12:25:55.986478 6090 handler.go:208] Removed *v1.Node 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fxbpl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.488213 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dzwxp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f8364fb-1be8-4baa-aff0-10d4a4e8d614\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f0d98ad3b8357dc9296635ef627f67300f4bbdd8131491552ba43830d1e658\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2xfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dzwxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.506251 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 12:25:38.416897 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 12:25:38.417058 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:25:38.422141 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1079216912/tls.crt::/tmp/serving-cert-1079216912/tls.key\\\\\\\"\\\\nI1205 12:25:38.771082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:25:38.774729 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:25:38.774756 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:25:38.774788 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:25:38.774811 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:25:38.780280 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:25:38.780316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780321 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:25:38.780329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:25:38.780332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:25:38.780335 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:25:38.780478 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:25:38.783020 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.517736 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a95e925e1cf25326e556267b9b43fcff4b12eafe8aeb03b577a01ed8f0d98ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.527521 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xpw77" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"beba5d67-ad2e-4968-91da-3f451dd2cdc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52618f95fb8efa7ff3baa650dcf8f3373b7d17bc27138fa03e49c68792adf36c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjrpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xpw77\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.545993 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be412f31-7a36-4811-8914-be8cdc987d08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76ddca8a6cc5af5e792bf0f50c2bc3cfcec5ea40705126799786373ad5d18e8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45f31312230c7d56492145f94ee5dfda841a79b7d35cf4139b4fc09eacf070fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sx8lm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.576358 4784 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.576389 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.576428 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.576446 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.576504 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:58Z","lastTransitionTime":"2025-12-05T12:25:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.678359 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.678393 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.678404 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.678419 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.678429 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:58Z","lastTransitionTime":"2025-12-05T12:25:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.725347 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-ln9ct"] Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.725771 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:25:58 crc kubenswrapper[4784]: E1205 12:25:58.725832 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.743607 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.758874 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf86c4db02b1091f887fc7dc73c7e2e7bb8d03e7212fb8bd443a43c4f51e976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87cb87114139d85aeacf41902c2c61f7ccb3cef1f22892820a521390c2a64d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.764508 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/531c2cfd-8b93-4ec4-88ab-fb4e40de2543-metrics-certs\") pod \"network-metrics-daemon-ln9ct\" (UID: \"531c2cfd-8b93-4ec4-88ab-fb4e40de2543\") " pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.764722 4784 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rmtkj\" (UniqueName: \"kubernetes.io/projected/531c2cfd-8b93-4ec4-88ab-fb4e40de2543-kube-api-access-rmtkj\") pod \"network-metrics-daemon-ln9ct\" (UID: \"531c2cfd-8b93-4ec4-88ab-fb4e40de2543\") " pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.770880 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.780539 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.780719 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.780810 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.780905 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.781003 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:58Z","lastTransitionTime":"2025-12-05T12:25:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.782109 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0851219b-f3dd-4229-b5c5-b4d86a452bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7t7rf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7t7rf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pzsqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2025-12-05T12:25:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.793168 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"reso
urce-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d237806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.809958 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b4aeb18e84f1b440528e547960f9aed0bf44e53a870af6b06bc85f754e1c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vpljs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.823218 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g5gv5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"759cb09f-42c3-4254-82f8-b5285b61012a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c44657f4f0a70fc2d2f1dcc08f1ca23c5fe94e77d3dbdd1bbb3f36f6471a62d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rpk8f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g5gv5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.839528 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"291f2a35-7dd5-4af9-87f0-caae4ef75c66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c86ad
4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e440d10f16055ce61279bed179cd6e0517947ef2844bc3c2f1c81e98055bae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30b9ff9d210d13ad1e8b18d2ac807697426b39ffe6bdd405ed8940014afc91d3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:25:56Z\\\",\\\"message\\\":\\\"qos/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 12:25:55.986142 6090 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 12:25:55.986220 6090 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 12:25:55.986226 6090 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 12:25:55.986284 6090 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 12:25:55.986288 6090 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 12:25:55.986371 6090 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 12:25:55.986371 6090 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 12:25:55.986390 6090 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 12:25:55.986404 6090 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 12:25:55.986409 6090 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 12:25:55.986422 6090 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 12:25:55.986454 6090 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 12:25:55.986453 6090 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 12:25:55.986465 6090 factory.go:656] Stopping watch factory\\\\nI1205 12:25:55.986478 6090 handler.go:208] Removed *v1.Node 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fxbpl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.849855 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dzwxp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f8364fb-1be8-4baa-aff0-10d4a4e8d614\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f0d98ad3b8357dc9296635ef627f67300f4bbdd8131491552ba43830d1e658\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2xfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dzwxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.861118 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ln9ct" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"531c2cfd-8b93-4ec4-88ab-fb4e40de2543\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmtkj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmtkj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:58Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ln9ct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.865691 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rmtkj\" (UniqueName: \"kubernetes.io/projected/531c2cfd-8b93-4ec4-88ab-fb4e40de2543-kube-api-access-rmtkj\") pod \"network-metrics-daemon-ln9ct\" (UID: \"531c2cfd-8b93-4ec4-88ab-fb4e40de2543\") " pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.865829 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/531c2cfd-8b93-4ec4-88ab-fb4e40de2543-metrics-certs\") pod \"network-metrics-daemon-ln9ct\" (UID: \"531c2cfd-8b93-4ec4-88ab-fb4e40de2543\") " pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:25:58 crc kubenswrapper[4784]: E1205 12:25:58.865960 4784 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 12:25:58 crc kubenswrapper[4784]: E1205 12:25:58.866011 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/531c2cfd-8b93-4ec4-88ab-fb4e40de2543-metrics-certs podName:531c2cfd-8b93-4ec4-88ab-fb4e40de2543 nodeName:}" failed. 
No retries permitted until 2025-12-05 12:25:59.365996418 +0000 UTC m=+38.786063233 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/531c2cfd-8b93-4ec4-88ab-fb4e40de2543-metrics-certs") pod "network-metrics-daemon-ln9ct" (UID: "531c2cfd-8b93-4ec4-88ab-fb4e40de2543") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.877744 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba1e93b1bb1f04a0c36b81190dc5ba99e09cff796f519b55340c78d2d6305a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.883518 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.883563 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.883575 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.883593 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.883606 4784 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:58Z","lastTransitionTime":"2025-12-05T12:25:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.884621 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rmtkj\" (UniqueName: \"kubernetes.io/projected/531c2cfd-8b93-4ec4-88ab-fb4e40de2543-kube-api-access-rmtkj\") pod \"network-metrics-daemon-ln9ct\" (UID: \"531c2cfd-8b93-4ec4-88ab-fb4e40de2543\") " pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.891123 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.902116 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a95e925e1cf25326e556267b9b43fcff4b12eafe8aeb03b577a01ed8f0d98ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.912238 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xpw77" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"beba5d67-ad2e-4968-91da-3f451dd2cdc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52618f95fb8efa7ff3baa650dcf8f3373b7d17bc27138fa03e49c68792adf36c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjrpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xpw77\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.922340 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be412f31-7a36-4811-8914-be8cdc987d08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76ddca8a6cc5af5e792bf0f50c2bc3cfcec5ea40705126799786373ad5d18e8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45f31312230c7d56492145f94ee5dfda841a79b7d35cf4139b4fc09eacf070fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sx8lm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.934268 4784 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1315abec8017870aa470f7fc9919d3ac9722ab2735
67e0e12baa8405ede002e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 12:25:38.416897 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 12:25:38.417058 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:25:38.422141 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1079216912/tls.crt::/tmp/serving-cert-1079216912/tls.key\\\\\\\"\\\\nI1205 12:25:38.771082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:25:38.774729 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:25:38.774756 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:25:38.774788 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:25:38.774811 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:25:38.780280 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:25:38.780316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780321 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:25:38.780329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:25:38.780332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:25:38.780335 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:25:38.780478 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:25:38.783020 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:58Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.985993 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.986068 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.986090 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.986119 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.986147 4784 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:58Z","lastTransitionTime":"2025-12-05T12:25:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.998822 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:25:58 crc kubenswrapper[4784]: I1205 12:25:58.998823 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:25:58 crc kubenswrapper[4784]: E1205 12:25:58.999267 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:25:58 crc kubenswrapper[4784]: E1205 12:25:58.999432 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.089427 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.089794 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.089806 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.089824 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.089836 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:59Z","lastTransitionTime":"2025-12-05T12:25:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.192442 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.192478 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.192486 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.192500 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.192509 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:59Z","lastTransitionTime":"2025-12-05T12:25:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.294963 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.295004 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.295015 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.295032 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.295044 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:59Z","lastTransitionTime":"2025-12-05T12:25:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.334536 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fxbpl_291f2a35-7dd5-4af9-87f0-caae4ef75c66/ovnkube-controller/1.log" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.335447 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fxbpl_291f2a35-7dd5-4af9-87f0-caae4ef75c66/ovnkube-controller/0.log" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.338859 4784 generic.go:334] "Generic (PLEG): container finished" podID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerID="96e440d10f16055ce61279bed179cd6e0517947ef2844bc3c2f1c81e98055bae" exitCode=1 Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.338912 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" event={"ID":"291f2a35-7dd5-4af9-87f0-caae4ef75c66","Type":"ContainerDied","Data":"96e440d10f16055ce61279bed179cd6e0517947ef2844bc3c2f1c81e98055bae"} Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.339008 4784 scope.go:117] "RemoveContainer" containerID="30b9ff9d210d13ad1e8b18d2ac807697426b39ffe6bdd405ed8940014afc91d3" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.339485 4784 scope.go:117] "RemoveContainer" containerID="96e440d10f16055ce61279bed179cd6e0517947ef2844bc3c2f1c81e98055bae" Dec 05 12:25:59 crc kubenswrapper[4784]: E1205 12:25:59.339658 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-fxbpl_openshift-ovn-kubernetes(291f2a35-7dd5-4af9-87f0-caae4ef75c66)\"" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.342038 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb" event={"ID":"0851219b-f3dd-4229-b5c5-b4d86a452bfd","Type":"ContainerStarted","Data":"f040ccf64d679f2035148f8391ebcf3fb0e262970a60355616a2d51c5114ddf3"} Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.342087 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb" event={"ID":"0851219b-f3dd-4229-b5c5-b4d86a452bfd","Type":"ContainerStarted","Data":"5b2386591cf338cbe93dd94038583c47b3f3e56762d52bf6e99de5eb8e08da02"} Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.356098 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 12:25:38.416897 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 12:25:38.417058 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:25:38.422141 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1079216912/tls.crt::/tmp/serving-cert-1079216912/tls.key\\\\\\\"\\\\nI1205 12:25:38.771082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:25:38.774729 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:25:38.774756 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:25:38.774788 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:25:38.774811 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:25:38.780280 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:25:38.780316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780321 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:25:38.780329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:25:38.780332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:25:38.780335 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:25:38.780478 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:25:38.783020 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.369264 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a95e925e1cf25326e556267b9b43fcff4b12eafe8aeb03b577a01ed8f0d98ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.371781 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/531c2cfd-8b93-4ec4-88ab-fb4e40de2543-metrics-certs\") pod \"network-metrics-daemon-ln9ct\" (UID: \"531c2cfd-8b93-4ec4-88ab-fb4e40de2543\") " pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:25:59 crc kubenswrapper[4784]: E1205 12:25:59.372462 4784 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 12:25:59 crc kubenswrapper[4784]: E1205 12:25:59.372546 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/531c2cfd-8b93-4ec4-88ab-fb4e40de2543-metrics-certs podName:531c2cfd-8b93-4ec4-88ab-fb4e40de2543 nodeName:}" failed. No retries permitted until 2025-12-05 12:26:00.372521286 +0000 UTC m=+39.792588141 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/531c2cfd-8b93-4ec4-88ab-fb4e40de2543-metrics-certs") pod "network-metrics-daemon-ln9ct" (UID: "531c2cfd-8b93-4ec4-88ab-fb4e40de2543") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.381989 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xpw77" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"beba5d67-ad2e-4968-91da-3f451dd2cdc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52618f95fb8efa7ff3baa650dcf8f3373b7d17bc27138fa03e49c68792adf36c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjrpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xpw77\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.399863 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.399905 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.399917 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.399932 4784 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.399944 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:59Z","lastTransitionTime":"2025-12-05T12:25:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.413048 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be412f31-7a36-4811-8914-be8cdc987d08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76ddca8a6cc5af5e792bf0f50c2bc3cfcec5ea40705126799786373ad5d18e8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45f31312230c7d56492145f94ee5dfda841a79b7d35cf4139b4fc09eacf070fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sx8lm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.432724 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image
\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d237806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.451123 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.462700 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf86c4db02b1091f887fc7dc73c7e2e7bb8d03e7212fb8bd443a43c4f51e976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87cb87114139d85aeacf41902c2c61f7ccb3cef1f22892820a521390c2a64d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.473787 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.485534 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0851219b-f3dd-4229-b5c5-b4d86a452bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7t7rf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7t7rf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pzsqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.499132 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b4aeb18e84f1b440528e547960f9aed0bf44e53a870af6b06bc85f754e1c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vpljs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.502362 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.502384 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:59 crc 
kubenswrapper[4784]: I1205 12:25:59.502391 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.502403 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.502411 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:59Z","lastTransitionTime":"2025-12-05T12:25:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.512169 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba1e93b1bb1f04a0c36b81190dc5ba99e09cff796f519b55340c78d2d6305a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.524893 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.538661 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g5gv5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"759cb09f-42c3-4254-82f8-b5285b61012a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c44657f4f0a70fc2d2f1dcc08f1ca23c5fe94e77d3dbdd1bbb3f36f6471a62d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rpk8f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g5gv5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.558950 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"291f2a35-7dd5-4af9-87f0-caae4ef75c66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c86ad
4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e440d10f16055ce61279bed179cd6e0517947ef2844bc3c2f1c81e98055bae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30b9ff9d210d13ad1e8b18d2ac807697426b39ffe6bdd405ed8940014afc91d3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:25:56Z\\\",\\\"message\\\":\\\"qos/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 12:25:55.986142 6090 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 12:25:55.986220 6090 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 12:25:55.986226 6090 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 12:25:55.986284 6090 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 12:25:55.986288 6090 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 12:25:55.986371 6090 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 12:25:55.986371 6090 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 12:25:55.986390 6090 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 12:25:55.986404 6090 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 12:25:55.986409 6090 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 12:25:55.986422 6090 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 12:25:55.986454 6090 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 12:25:55.986453 6090 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 12:25:55.986465 6090 factory.go:656] Stopping watch factory\\\\nI1205 12:25:55.986478 6090 handler.go:208] Removed *v1.Node 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e440d10f16055ce61279bed179cd6e0517947ef2844bc3c2f1c81e98055bae\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"8 for removal\\\\nI1205 12:25:58.103230 6226 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1205 12:25:58.103235 6226 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1205 12:25:58.103256 6226 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 12:25:58.103257 6226 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 12:25:58.103265 6226 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1205 12:25:58.103274 6226 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 12:25:58.103282 6226 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 12:25:58.103279 6226 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 12:25:58.103286 6226 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 12:25:58.103334 6226 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 12:25:58.103400 6226 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 12:25:58.103534 6226 factory.go:656] Stopping watch factory\\\\nI1205 12:25:58.103575 6226 ovnkube.go:599] Stopped ovnkube\\\\nI1205 12:25:58.103581 6226 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 12:25:58.103692 6226 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1205 12:25:58.103840 6226 ovnkube.go:137] failed to run ovnkube: [failed to start network 
contr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1
d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fxbpl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.569902 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dzwxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f8364fb-1be8-4baa-aff0-10d4a4e8d614\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f0d98ad3b8357dc9296635ef627f67300f4bbdd8131491552ba43830d1e658\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2xfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168
.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dzwxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.581503 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ln9ct" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"531c2cfd-8b93-4ec4-88ab-fb4e40de2543\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmtkj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmtkj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:58Z\\\"}}\" for pod 
\"openshift-multus\"/\"network-metrics-daemon-ln9ct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.597104 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b4aeb18e84f1b440528e547960f9aed0bf44e53a870af6b06bc85f754e1c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":
\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vpljs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: 
Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.604131 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.604169 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.604208 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.604228 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.604239 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:59Z","lastTransitionTime":"2025-12-05T12:25:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.610679 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba1e93b1bb1f04a0c36b81190dc5ba99e09cff796f519b55340c78d2d6305a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.623387 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.641896 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g5gv5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"759cb09f-42c3-4254-82f8-b5285b61012a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c44657f4f0a70fc2d2f1dcc08f1ca23c5fe94e77d3dbdd1bbb3f36f6471a62d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rpk8f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g5gv5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.662297 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"291f2a35-7dd5-4af9-87f0-caae4ef75c66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c86ad
4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e440d10f16055ce61279bed179cd6e0517947ef2844bc3c2f1c81e98055bae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30b9ff9d210d13ad1e8b18d2ac807697426b39ffe6bdd405ed8940014afc91d3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:25:56Z\\\",\\\"message\\\":\\\"qos/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 12:25:55.986142 6090 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 12:25:55.986220 6090 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 12:25:55.986226 6090 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 12:25:55.986284 6090 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 12:25:55.986288 6090 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 12:25:55.986371 6090 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 12:25:55.986371 6090 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 12:25:55.986390 6090 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 12:25:55.986404 6090 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 12:25:55.986409 6090 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 12:25:55.986422 6090 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 12:25:55.986454 6090 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 12:25:55.986453 6090 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 12:25:55.986465 6090 factory.go:656] Stopping watch factory\\\\nI1205 12:25:55.986478 6090 handler.go:208] Removed *v1.Node 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e440d10f16055ce61279bed179cd6e0517947ef2844bc3c2f1c81e98055bae\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"8 for removal\\\\nI1205 12:25:58.103230 6226 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1205 12:25:58.103235 6226 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1205 12:25:58.103256 6226 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 12:25:58.103257 6226 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 12:25:58.103265 6226 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1205 12:25:58.103274 6226 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 12:25:58.103282 6226 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 12:25:58.103279 6226 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 12:25:58.103286 6226 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 12:25:58.103334 6226 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 12:25:58.103400 6226 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 12:25:58.103534 6226 factory.go:656] Stopping watch factory\\\\nI1205 12:25:58.103575 6226 ovnkube.go:599] Stopped ovnkube\\\\nI1205 12:25:58.103581 6226 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 12:25:58.103692 6226 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1205 12:25:58.103840 6226 ovnkube.go:137] failed to run ovnkube: [failed to start network 
contr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1
d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fxbpl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.673852 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dzwxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f8364fb-1be8-4baa-aff0-10d4a4e8d614\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f0d98ad3b8357dc9296635ef627f67300f4bbdd8131491552ba43830d1e658\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2xfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168
.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dzwxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.686305 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ln9ct" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"531c2cfd-8b93-4ec4-88ab-fb4e40de2543\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmtkj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmtkj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:58Z\\\"}}\" for pod 
\"openshift-multus\"/\"network-metrics-daemon-ln9ct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.699404 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\
\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 12:25:38.416897 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 12:25:38.417058 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:25:38.422141 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1079216912/tls.crt::/tmp/serving-cert-1079216912/tls.key\\\\\\\"\\\\nI1205 12:25:38.771082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:25:38.774729 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:25:38.774756 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:25:38.774788 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:25:38.774811 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:25:38.780280 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:25:38.780316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780321 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:25:38.780329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:25:38.780332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:25:38.780335 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:25:38.780478 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:25:38.783020 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.707354 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.707423 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.707441 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.707468 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.707486 4784 setters.go:603] "Node became not ready" node="crc" 
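Every status patch in this stretch dies on the same TLS handshake: the webhook's serving certificate expired on 2025-08-24T17:21:41Z, months before the node's clock reading of 2025-12-05T12:25:59Z. Below is a minimal sketch of the validity-window check behind that "x509: certificate has expired or is not yet valid" message, using only the Go standard library; the certificate path reuses the /etc/webhook-cert mount that appears later in this log and is otherwise an assumption.

```go
// certcheck.go - minimal sketch: report whether a PEM-encoded serving
// certificate is currently inside its validity window, mirroring the
// x509 "expired or is not yet valid" failure seen in the log above.
// The file path is an assumption taken from the webhook-cert mountPath.
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"log"
	"os"
	"time"
)

func main() {
	data, err := os.ReadFile("/etc/webhook-cert/tls.crt") // assumed path
	if err != nil {
		log.Fatal(err)
	}
	block, _ := pem.Decode(data)
	if block == nil {
		log.Fatal("no PEM block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		log.Fatal(err)
	}
	now := time.Now().UTC()
	switch {
	case now.Before(cert.NotBefore):
		fmt.Printf("not yet valid: current time %s is before %s\n",
			now.Format(time.RFC3339), cert.NotBefore.Format(time.RFC3339))
	case now.After(cert.NotAfter):
		// The branch this cluster is hitting:
		// current time 2025-12-05T12:25:59Z is after 2025-08-24T17:21:41Z.
		fmt.Printf("expired: current time %s is after %s\n",
			now.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
	default:
		fmt.Printf("valid until %s\n", cert.NotAfter.Format(time.RFC3339))
	}
}
```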
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:59Z","lastTransitionTime":"2025-12-05T12:25:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.711319 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a95e925e1cf25326e556267b9b43fcff4b12eafe8aeb03b577a01ed8f0d98ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.721317 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xpw77" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"beba5d67-ad2e-4968-91da-3f451dd2cdc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52618f95fb8efa7ff3baa650dcf8f3373b7d17bc27138fa03e49c68792adf36c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjrpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xpw77\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.732528 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be412f31-7a36-4811-8914-be8cdc987d08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76ddca8a6cc5af5e792bf0f50c2bc3cfcec5ea40705126799786373ad5d18e8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45f31312230c7d56492145f94ee5dfda841a79b7d35cf4139b4fc09eacf070fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sx8lm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.751814 4784 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d23
7806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.765513 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
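The status manager logs each rejected patch verbatim, with one extra layer of quote escaping added by the journal; directives such as $setElementOrder/conditions mark it as a strategic-merge patch over the conditions list. A small sketch, under the assumption that exactly one escaping layer needs peeling, that unquotes a trimmed fragment of the iptables-alerter patch above and pretty-prints it.

```go
// patchdump.go - sketch: peel one layer of journal escaping off an embedded
// status patch and pretty-print it. The literal below is a trimmed fragment
// of the patch logged for iptables-alerter-4ln5h; a real tool would read
// the journal line instead of a hard-coded string.
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"log"
	"strconv"
)

func main() {
	// One escaping layer, as it appears inside the kubelet's err="..." field.
	escaped := `"{\"metadata\":{\"uid\":\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\"},\"status\":{\"$setElementOrder/conditions\":[{\"type\":\"Ready\"}]}}"`

	raw, err := strconv.Unquote(escaped) // -> plain JSON text
	if err != nil {
		log.Fatal(err)
	}
	var out bytes.Buffer
	if err := json.Indent(&out, []byte(raw), "", "  "); err != nil {
		log.Fatal(err)
	}
	fmt.Println(out.String())
}
```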
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.778088 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf86c4db02b1091f887fc7dc73c7e2e7bb8d03e7212fb8bd443a43c4f51e976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87cb87114139d85aeacf41902c2c61f7ccb3cef1f22892820a521390c2a64d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.790481 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.800323 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0851219b-f3dd-4229-b5c5-b4d86a452bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f040ccf64d679f2035148f8391ebcf3fb0e262970a60355616a2d51c5114ddf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7t7rf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2386591cf338cbe93dd94038583c47b3f3e56762d52bf6e99de5eb8e08da02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\
\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7t7rf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pzsqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:25:59Z is after 2025-08-24T17:21:41Z" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.809856 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.809883 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.809891 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.809904 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.809915 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:59Z","lastTransitionTime":"2025-12-05T12:25:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.912609 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.912664 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.912672 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.912687 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.912697 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:25:59Z","lastTransitionTime":"2025-12-05T12:25:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:25:59 crc kubenswrapper[4784]: I1205 12:25:59.998134 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:25:59 crc kubenswrapper[4784]: E1205 12:25:59.998325 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.015146 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.015208 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.015221 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.015237 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.015248 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:00Z","lastTransitionTime":"2025-12-05T12:26:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.118119 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.118163 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.118172 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.118199 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.118210 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:00Z","lastTransitionTime":"2025-12-05T12:26:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
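The Ready=False reason here is mechanical: the runtime reports NetworkReady=false because /etc/kubernetes/cni/net.d/ holds no CNI configuration file yet, and it stays that way until the OVN-Kubernetes controller writes one. A sketch of that directory probe; treating *.conf, *.conflist, and *.json as the accepted extensions follows the reference libcni loader and is an assumption about the kubelet's exact rule.

```go
// cniprobe.go - sketch: report whether a CNI conf dir contains any usable
// network configuration, approximating the check behind the
// "no CNI configuration file in /etc/kubernetes/cni/net.d/" message.
// Accepted extensions follow the reference libcni loader (assumption).
package main

import (
	"fmt"
	"log"
	"os"
	"path/filepath"
)

func hasCNIConfig(dir string) (bool, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ok, err := hasCNIConfig("/etc/kubernetes/cni/net.d")
	if err != nil {
		log.Fatal(err)
	}
	if !ok {
		fmt.Println("NetworkReady=false: no CNI configuration file found")
		return
	}
	fmt.Println("NetworkReady=true")
}
```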
Has your network provider started?"} Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.220813 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.220845 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.220854 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.220868 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.220877 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:00Z","lastTransitionTime":"2025-12-05T12:26:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.323047 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.323116 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.323132 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.323155 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.323170 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:00Z","lastTransitionTime":"2025-12-05T12:26:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
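Note how the NotReady lines repeat every hundred milliseconds or so: each sync refreshes the condition's lastHeartbeatTime, while on the stored Node object lastTransitionTime should only move when the status value actually flips (the condition printed here is the freshly built candidate, which is why both stamps coincide). A sketch of that merge rule with pared-down local types, not the real k8s.io/api definitions.

```go
// condition.go - sketch of the heartbeat-vs-transition rule visible in the
// repeated Ready=False lines above: every sync refreshes LastHeartbeatTime,
// but LastTransitionTime is carried forward unless Status changes.
// Local stand-in types, not the Kubernetes API definitions.
package main

import (
	"fmt"
	"time"
)

type NodeCondition struct {
	Type               string
	Status             string // "True" / "False" / "Unknown"
	LastHeartbeatTime  time.Time
	LastTransitionTime time.Time
	Reason, Message    string
}

// updateCondition applies a new observation to an existing condition.
func updateCondition(old NodeCondition, status, reason, message string, now time.Time) NodeCondition {
	c := NodeCondition{
		Type:              old.Type,
		Status:            status,
		LastHeartbeatTime: now, // always refreshed
		Reason:            reason,
		Message:           message,
	}
	if old.Status == status {
		c.LastTransitionTime = old.LastTransitionTime // same status: keep old stamp
	} else {
		c.LastTransitionTime = now // status flipped: record the transition
	}
	return c
}

func main() {
	t0 := time.Date(2025, 12, 5, 12, 25, 59, 0, time.UTC)
	ready := NodeCondition{Type: "Ready", Status: "False", LastHeartbeatTime: t0, LastTransitionTime: t0}
	// ~100ms later the kubelet observes the same NotReady state:
	ready = updateCondition(ready, "False", "KubeletNotReady",
		"container runtime network not ready", t0.Add(100*time.Millisecond))
	fmt.Println("heartbeat: ", ready.LastHeartbeatTime.Format(time.RFC3339Nano))
	fmt.Println("transition:", ready.LastTransitionTime.Format(time.RFC3339Nano))
}
```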
Has your network provider started?"} Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.347396 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fxbpl_291f2a35-7dd5-4af9-87f0-caae4ef75c66/ovnkube-controller/1.log" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.350986 4784 scope.go:117] "RemoveContainer" containerID="96e440d10f16055ce61279bed179cd6e0517947ef2844bc3c2f1c81e98055bae" Dec 05 12:26:00 crc kubenswrapper[4784]: E1205 12:26:00.351124 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-fxbpl_openshift-ovn-kubernetes(291f2a35-7dd5-4af9-87f0-caae4ef75c66)\"" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.362926 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dzwxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f8364fb-1be8-4baa-aff0-10d4a4e8d614\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f0d98ad3b8357dc9296635ef627f67300f4bbdd8131491552ba43830d1e658\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2xfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dzwxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is 
not yet valid: current time 2025-12-05T12:26:00Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.375142 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ln9ct" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"531c2cfd-8b93-4ec4-88ab-fb4e40de2543\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmtkj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmtkj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:58Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ln9ct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:00Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.382533 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: 
\"kubernetes.io/secret/531c2cfd-8b93-4ec4-88ab-fb4e40de2543-metrics-certs\") pod \"network-metrics-daemon-ln9ct\" (UID: \"531c2cfd-8b93-4ec4-88ab-fb4e40de2543\") " pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:26:00 crc kubenswrapper[4784]: E1205 12:26:00.382650 4784 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 12:26:00 crc kubenswrapper[4784]: E1205 12:26:00.382692 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/531c2cfd-8b93-4ec4-88ab-fb4e40de2543-metrics-certs podName:531c2cfd-8b93-4ec4-88ab-fb4e40de2543 nodeName:}" failed. No retries permitted until 2025-12-05 12:26:02.382679068 +0000 UTC m=+41.802745873 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/531c2cfd-8b93-4ec4-88ab-fb4e40de2543-metrics-certs") pod "network-metrics-daemon-ln9ct" (UID: "531c2cfd-8b93-4ec4-88ab-fb4e40de2543") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.387572 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba1e93b1bb1f04a0c36b81190dc5ba99e09cff796f519b55340c78d2d6305a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:00Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.399771 4784 status_manager.go:875] "Failed to update status for pod" 
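The metrics-certs mount failure is not retried immediately: the operation is parked until a deadline, with the waiting period growing on each failure ("No retries permitted until ... (durationBeforeRetry 2s)" above). A sketch of that gate; the 500ms seed, doubling factor, and 2m cap are illustrative assumptions, with only the 2s figure taken from this log.

```go
// backoff.go - sketch of the retry gate behind "No retries permitted until
// ... (durationBeforeRetry 2s)": each failure grows the waiting period up
// to a cap, and the operation is refused until the deadline passes.
// Seed, factor, and cap here are illustrative, not kubelet's exact values.
package main

import (
	"fmt"
	"time"
)

const (
	initialDelay = 500 * time.Millisecond
	maxDelay     = 2 * time.Minute
)

type retryGate struct {
	delay     time.Duration
	notBefore time.Time
}

// fail records another failure at time now and schedules the next attempt.
func (g *retryGate) fail(now time.Time) {
	if g.delay == 0 {
		g.delay = initialDelay
	} else {
		g.delay *= 2
		if g.delay > maxDelay {
			g.delay = maxDelay
		}
	}
	g.notBefore = now.Add(g.delay)
}

// allowed reports whether a retry may run at time now.
func (g *retryGate) allowed(now time.Time) bool { return !now.Before(g.notBefore) }

func main() {
	var g retryGate
	now := time.Now()
	for i := 0; i < 4; i++ {
		g.fail(now)
		fmt.Printf("failure %d: next retry not before +%v\n", i+1, g.delay)
		now = g.notBefore // wait out the backoff before the next attempt
	}
	fmt.Println("retry allowed now?", g.allowed(now))
}
```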
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:00Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.413436 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g5gv5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"759cb09f-42c3-4254-82f8-b5285b61012a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c44657f4f0a70fc2d2f1dcc08f1ca23c5fe94e77d3dbdd1bbb3f36f6471a62d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rpk8f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g5gv5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:00Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.426031 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.426085 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.426098 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.426115 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.426128 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:00Z","lastTransitionTime":"2025-12-05T12:26:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.438297 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"291f2a35-7dd5-4af9-87f0-caae4ef75c66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e440d10f16055ce61279bed179cd6e0517947e
f2844bc3c2f1c81e98055bae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e440d10f16055ce61279bed179cd6e0517947ef2844bc3c2f1c81e98055bae\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"8 for removal\\\\nI1205 12:25:58.103230 6226 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1205 12:25:58.103235 6226 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1205 12:25:58.103256 6226 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 12:25:58.103257 6226 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 12:25:58.103265 6226 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1205 12:25:58.103274 6226 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 12:25:58.103282 6226 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 12:25:58.103279 6226 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 12:25:58.103286 6226 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 12:25:58.103334 6226 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 12:25:58.103400 6226 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 12:25:58.103534 6226 factory.go:656] Stopping watch factory\\\\nI1205 12:25:58.103575 6226 ovnkube.go:599] Stopped ovnkube\\\\nI1205 12:25:58.103581 6226 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 12:25:58.103692 6226 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1205 12:25:58.103840 6226 ovnkube.go:137] failed to run ovnkube: [failed to start network contr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-fxbpl_openshift-ovn-kubernetes(291f2a35-7dd5-4af9-87f0-caae4ef75c66)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fxbpl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:00Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.451442 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be412f31-7a36-4811-8914-be8cdc987d08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76ddca8a6cc5af5e792bf0f50c2bc3cfcec5ea40705126799786373ad5d18e8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",
\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45f31312230c7d56492145f94ee5dfda841a79b7d35cf4139b4fc09eacf070fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sx8lm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:00Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.467671 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 12:25:38.416897 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 12:25:38.417058 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:25:38.422141 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1079216912/tls.crt::/tmp/serving-cert-1079216912/tls.key\\\\\\\"\\\\nI1205 12:25:38.771082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:25:38.774729 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:25:38.774756 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:25:38.774788 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:25:38.774811 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:25:38.780280 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:25:38.780316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780321 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:25:38.780329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:25:38.780332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:25:38.780335 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:25:38.780478 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:25:38.783020 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:00Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.483380 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a95e925e1cf25326e556267b9b43fcff4b12eafe8aeb03b577a01ed8f0d98ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:00Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.494849 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xpw77" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"beba5d67-ad2e-4968-91da-3f451dd2cdc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52618f95fb8efa7ff3baa650dcf8f3373b7d17bc27138fa03e49c68792adf36c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjrpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xpw77\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:00Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.507864 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:00Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.520892 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0851219b-f3dd-4229-b5c5-b4d86a452bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f040ccf64d679f2035148f8391ebcf3fb0e262970a60355616a2d51c5114ddf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7t7rf\\\",\\\"readOnly\\\":true,\\
\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2386591cf338cbe93dd94038583c47b3f3e56762d52bf6e99de5eb8e08da02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7t7rf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pzsqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:00Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.528638 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.528680 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.528695 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.528715 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.528730 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:00Z","lastTransitionTime":"2025-12-05T12:26:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.535459 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d237806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:00Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.549115 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:00Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.565231 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf86c4db02b1091f887fc7dc73c7e2e7bb8d03e7212fb8bd443a43c4f51e976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87cb87114139d85aeacf41902c2c61f7ccb3cef1f22892820a521390c2a64d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:00Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.580540 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b4aeb18e84f1b440528e547960f9aed0bf44e53a870af6b06bc85f754e1c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e3
3e5030664395e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"image\\\":\\\"quay.io/ope
nshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vpljs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:00Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.631060 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.631099 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.631107 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.631122 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.631133 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:00Z","lastTransitionTime":"2025-12-05T12:26:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.734322 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.734372 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.734381 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.734398 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.734410 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:00Z","lastTransitionTime":"2025-12-05T12:26:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.836245 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.836281 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.836289 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.836304 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.836313 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:00Z","lastTransitionTime":"2025-12-05T12:26:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.938746 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.938801 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.938819 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.938841 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.938856 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:00Z","lastTransitionTime":"2025-12-05T12:26:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.998323 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.998420 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:26:00 crc kubenswrapper[4784]: E1205 12:26:00.998482 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.998508 4784 util.go:30] "No sandbox for pod can be found. 
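Two failure modes repeat throughout this log: every pod status patch is rejected because the pod.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 presents a TLS certificate that expired 2025-08-24T17:21:41Z, and the node is held NotReady because no CNI configuration file exists in /etc/kubernetes/cni/net.d/. The standalone Go sketch below is a hypothetical diagnostic, not kubelet or OpenShift code: the endpoint and directory are taken from the log messages above, and the way it verifies them is an assumption about how one might confirm both conditions from the node.

package main

import (
	"crypto/tls"
	"fmt"
	"path/filepath"
	"time"
)

func main() {
	// Check 1: inspect the webhook serving certificate without trusting it.
	// Address taken from the failed Post "https://127.0.0.1:9743/pod" above.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		fmt.Println("webhook dial failed:", err)
	} else {
		for _, cert := range conn.ConnectionState().PeerCertificates {
			fmt.Printf("webhook cert %q: notAfter=%s expired=%t\n",
				cert.Subject.CommonName,
				cert.NotAfter.Format(time.RFC3339),
				time.Now().After(cert.NotAfter))
		}
		conn.Close()
	}

	// Check 2: the NetworkReady condition above stays false until a CNI
	// config file exists in the directory named in the kubelet message.
	matches, _ := filepath.Glob("/etc/kubernetes/cni/net.d/*")
	if len(matches) == 0 {
		fmt.Println("no CNI configuration file in /etc/kubernetes/cni/net.d/ - node stays NotReady")
	} else {
		fmt.Println("CNI config present:", matches)
	}
}

Consistent with the second check, the ovnkube-node pod later in this log mounts /etc/cni/net.d (volume host-cni-netd), so the missing CNI config would be expected to appear once its ovnkube-controller container stops crash-looping.

Dec 05 12:26:00 crc kubenswrapper[4784]: I1205 12:26:00.998508 4784 util.go:30] "No sandbox for pod can be found. 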
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:26:00 crc kubenswrapper[4784]: E1205 12:26:00.998638 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543" Dec 05 12:26:00 crc kubenswrapper[4784]: E1205 12:26:00.998876 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.016378 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf86c4db02b1091f887fc7dc73c7e2e7bb8d03e7212fb8bd443a43c4f51e976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87cb87114139d85aeacf41902c2c61f7ccb3cef1f22892820a521390c2a64d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name
\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.029120 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.039841 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0851219b-f3dd-4229-b5c5-b4d86a452bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f040ccf64d679f2035148f8391ebcf3fb0e262970a60355616a2d51c5114ddf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7t7rf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2386591cf338cbe93dd94038583c47b3f3e56762d52bf6e99de5eb8e08da02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\
\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7t7rf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pzsqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.041251 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.041285 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.041293 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.041311 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.041322 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:01Z","lastTransitionTime":"2025-12-05T12:26:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.052831 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d237806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.064569 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.078824 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b4aeb18e84f1b440528e547960f9aed0bf44e53a870af6b06bc85f754e1c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vpljs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.098042 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"291f2a35-7dd5-4af9-87f0-caae4ef75c66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e440d10f16055ce61279bed179cd6e0517947e
f2844bc3c2f1c81e98055bae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e440d10f16055ce61279bed179cd6e0517947ef2844bc3c2f1c81e98055bae\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"8 for removal\\\\nI1205 12:25:58.103230 6226 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1205 12:25:58.103235 6226 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1205 12:25:58.103256 6226 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 12:25:58.103257 6226 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 12:25:58.103265 6226 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1205 12:25:58.103274 6226 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 12:25:58.103282 6226 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 12:25:58.103279 6226 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 12:25:58.103286 6226 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 12:25:58.103334 6226 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 12:25:58.103400 6226 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 12:25:58.103534 6226 factory.go:656] Stopping watch factory\\\\nI1205 12:25:58.103575 6226 ovnkube.go:599] Stopped ovnkube\\\\nI1205 12:25:58.103581 6226 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 12:25:58.103692 6226 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1205 12:25:58.103840 6226 ovnkube.go:137] failed to run ovnkube: [failed to start network contr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-fxbpl_openshift-ovn-kubernetes(291f2a35-7dd5-4af9-87f0-caae4ef75c66)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fxbpl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.108064 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dzwxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f8364fb-1be8-4baa-aff0-10d4a4e8d614\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f0d98ad3b8357dc9296635ef627f67300f4bbdd8131491552ba43830d1e658\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2xfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dzwxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.118648 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ln9ct" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"531c2cfd-8b93-4ec4-88ab-fb4e40de2543\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmtkj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmtkj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:58Z\\\"}}\" for pod 
\"openshift-multus\"/\"network-metrics-daemon-ln9ct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.131312 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba1e93b1bb1f04a0c36b81190dc5ba99e09cff796f519b55340c78d2d6305a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.143264 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.143711 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.143746 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.143757 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.143771 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.143782 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:01Z","lastTransitionTime":"2025-12-05T12:26:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.156315 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g5gv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759cb09f-42c3-4254-82f8-b5285b61012a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c44657f4f0a70fc2d2f1dcc08f1ca23c5fe94e77d3dbdd1bbb3f36f6471a62d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rpk8f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g5gv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.165679 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xpw77" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"beba5d67-ad2e-4968-91da-3f451dd2cdc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52618f95fb8efa7ff3baa650dcf8f3373b7d17bc27138fa03e49c68792adf36c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjrpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xpw77\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.176681 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be412f31-7a36-4811-8914-be8cdc987d08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76ddca8a6cc5af5e792bf0f50c2bc3cfcec5ea40705126799786373ad5d18e8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45f31312230c7d56492145f94ee5dfda841a79b7d35cf4139b4fc09eacf070fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sx8lm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.190327 4784 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1315abec8017870aa470f7fc9919d3ac9722ab2735
67e0e12baa8405ede002e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 12:25:38.416897 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 12:25:38.417058 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:25:38.422141 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1079216912/tls.crt::/tmp/serving-cert-1079216912/tls.key\\\\\\\"\\\\nI1205 12:25:38.771082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:25:38.774729 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:25:38.774756 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:25:38.774788 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:25:38.774811 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:25:38.780280 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:25:38.780316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780321 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:25:38.780329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:25:38.780332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:25:38.780335 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:25:38.780478 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:25:38.783020 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.202054 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a95e925e1cf25326e556267b9b43fcff4b12eafe8aeb03b577a01ed8f0d98ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.248421 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.248478 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.248493 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.248528 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.248543 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:01Z","lastTransitionTime":"2025-12-05T12:26:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.351836 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.351936 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.351960 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.351989 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.352011 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:01Z","lastTransitionTime":"2025-12-05T12:26:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.454914 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.454962 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.454970 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.454987 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.454995 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:01Z","lastTransitionTime":"2025-12-05T12:26:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.557344 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.557418 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.557440 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.557466 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.557483 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:01Z","lastTransitionTime":"2025-12-05T12:26:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.660518 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.660587 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.660611 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.660641 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.660662 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:01Z","lastTransitionTime":"2025-12-05T12:26:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.763709 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.763799 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.763817 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.763840 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.763855 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:01Z","lastTransitionTime":"2025-12-05T12:26:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.866312 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.866351 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.866370 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.866387 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.866397 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:01Z","lastTransitionTime":"2025-12-05T12:26:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.968593 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.968629 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.968639 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.968653 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.968661 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:01Z","lastTransitionTime":"2025-12-05T12:26:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:01 crc kubenswrapper[4784]: I1205 12:26:01.998213 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:26:01 crc kubenswrapper[4784]: E1205 12:26:01.998346 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.071230 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.071318 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.071337 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.071359 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.071374 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:02Z","lastTransitionTime":"2025-12-05T12:26:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.174079 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.174127 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.174159 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.174173 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.174181 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:02Z","lastTransitionTime":"2025-12-05T12:26:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.234802 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.234896 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.234921 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.234952 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.234978 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:02Z","lastTransitionTime":"2025-12-05T12:26:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:02 crc kubenswrapper[4784]: E1205 12:26:02.248170 4784 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"08035136-431a-41d0-879c-bf86d5af7e54\\\",\\\"systemUUID\\\":\\\"aac4a951-c40f-4b5f-a660-6c137757957c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:02Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.252162 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.252241 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.252260 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.252282 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.252294 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:02Z","lastTransitionTime":"2025-12-05T12:26:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:02 crc kubenswrapper[4784]: E1205 12:26:02.269079 4784 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"08035136-431a-41d0-879c-bf86d5af7e54\\\",\\\"systemUUID\\\":\\\"aac4a951-c40f-4b5f-a660-6c137757957c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:02Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.273700 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.273756 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.273767 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.273783 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.273796 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:02Z","lastTransitionTime":"2025-12-05T12:26:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:02 crc kubenswrapper[4784]: E1205 12:26:02.285584 4784 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"08035136-431a-41d0-879c-bf86d5af7e54\\\",\\\"systemUUID\\\":\\\"aac4a951-c40f-4b5f-a660-6c137757957c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:02Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.288981 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.289017 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.289028 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.289046 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.289061 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:02Z","lastTransitionTime":"2025-12-05T12:26:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:02 crc kubenswrapper[4784]: E1205 12:26:02.300988 4784 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"08035136-431a-41d0-879c-bf86d5af7e54\\\",\\\"systemUUID\\\":\\\"aac4a951-c40f-4b5f-a660-6c137757957c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:02Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.305920 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.305958 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.305968 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.305984 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.305995 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:02Z","lastTransitionTime":"2025-12-05T12:26:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:02 crc kubenswrapper[4784]: E1205 12:26:02.319527 4784 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"08035136-431a-41d0-879c-bf86d5af7e54\\\",\\\"systemUUID\\\":\\\"aac4a951-c40f-4b5f-a660-6c137757957c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:02Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:02 crc kubenswrapper[4784]: E1205 12:26:02.319699 4784 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.321216 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.321249 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.321261 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.321277 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.321289 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:02Z","lastTransitionTime":"2025-12-05T12:26:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.402964 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/531c2cfd-8b93-4ec4-88ab-fb4e40de2543-metrics-certs\") pod \"network-metrics-daemon-ln9ct\" (UID: \"531c2cfd-8b93-4ec4-88ab-fb4e40de2543\") " pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:26:02 crc kubenswrapper[4784]: E1205 12:26:02.403203 4784 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 12:26:02 crc kubenswrapper[4784]: E1205 12:26:02.403286 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/531c2cfd-8b93-4ec4-88ab-fb4e40de2543-metrics-certs podName:531c2cfd-8b93-4ec4-88ab-fb4e40de2543 nodeName:}" failed. No retries permitted until 2025-12-05 12:26:06.403267922 +0000 UTC m=+45.823334737 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/531c2cfd-8b93-4ec4-88ab-fb4e40de2543-metrics-certs") pod "network-metrics-daemon-ln9ct" (UID: "531c2cfd-8b93-4ec4-88ab-fb4e40de2543") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.424174 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.424282 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.424315 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.424344 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.424365 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:02Z","lastTransitionTime":"2025-12-05T12:26:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.526292 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.526329 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.526337 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.526351 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.526379 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:02Z","lastTransitionTime":"2025-12-05T12:26:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.628531 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.628592 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.628602 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.628617 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.628625 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:02Z","lastTransitionTime":"2025-12-05T12:26:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.731675 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.731753 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.731775 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.731804 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.731825 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:02Z","lastTransitionTime":"2025-12-05T12:26:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.835531 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.835584 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.835600 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.835617 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.835627 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:02Z","lastTransitionTime":"2025-12-05T12:26:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.938464 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.938507 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.938518 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.938534 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.938545 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:02Z","lastTransitionTime":"2025-12-05T12:26:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.997941 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.998034 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:26:02 crc kubenswrapper[4784]: E1205 12:26:02.998107 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:26:02 crc kubenswrapper[4784]: E1205 12:26:02.998172 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:26:02 crc kubenswrapper[4784]: I1205 12:26:02.998290 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:26:02 crc kubenswrapper[4784]: E1205 12:26:02.998370 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543" Dec 05 12:26:03 crc kubenswrapper[4784]: I1205 12:26:03.042049 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:03 crc kubenswrapper[4784]: I1205 12:26:03.042098 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:03 crc kubenswrapper[4784]: I1205 12:26:03.042110 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:03 crc kubenswrapper[4784]: I1205 12:26:03.042128 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:03 crc kubenswrapper[4784]: I1205 12:26:03.042141 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:03Z","lastTransitionTime":"2025-12-05T12:26:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
[node-status cycle repeated at 12:26:03.042, .144, .247, .350, .452, .555, .658, .761, .863 and .966; collapsed]
Dec 05 12:26:03 crc kubenswrapper[4784]: I1205 12:26:03.998476 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 12:26:03 crc kubenswrapper[4784]: E1205 12:26:03.998640 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
[node-status cycle repeated at 12:26:04.068, .171, .274, .376, .478, .581, .683, .786, .889 and .992; collapsed]
Dec 05 12:26:04 crc kubenswrapper[4784]: I1205 12:26:04.998227 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct"
Dec 05 12:26:04 crc kubenswrapper[4784]: I1205 12:26:04.998281 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 12:26:04 crc kubenswrapper[4784]: E1205 12:26:04.998469 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543"
Dec 05 12:26:04 crc kubenswrapper[4784]: I1205 12:26:04.998523 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 12:26:04 crc kubenswrapper[4784]: E1205 12:26:04.998677 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 12:26:04 crc kubenswrapper[4784]: E1205 12:26:04.998802 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
[node-status cycle repeated at 12:26:05.095, .198, .302, .405, .509, .611, .714, .817 and .919; collapsed]
Dec 05 12:26:05 crc kubenswrapper[4784]: I1205 12:26:05.998666 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 12:26:05 crc kubenswrapper[4784]: E1205 12:26:05.998793 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
[node-status cycle repeated at 12:26:06.022, .124, .226, .328 and .431; collapsed]
Dec 05 12:26:06 crc kubenswrapper[4784]: I1205 12:26:06.443293 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/531c2cfd-8b93-4ec4-88ab-fb4e40de2543-metrics-certs\") pod \"network-metrics-daemon-ln9ct\" (UID: \"531c2cfd-8b93-4ec4-88ab-fb4e40de2543\") " pod="openshift-multus/network-metrics-daemon-ln9ct"
Dec 05 12:26:06 crc kubenswrapper[4784]: E1205 12:26:06.443443 4784 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 05 12:26:06 crc kubenswrapper[4784]: E1205 12:26:06.443495 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/531c2cfd-8b93-4ec4-88ab-fb4e40de2543-metrics-certs podName:531c2cfd-8b93-4ec4-88ab-fb4e40de2543 nodeName:}" failed. No retries permitted until 2025-12-05 12:26:14.443480388 +0000 UTC m=+53.863547193 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/531c2cfd-8b93-4ec4-88ab-fb4e40de2543-metrics-certs") pod "network-metrics-daemon-ln9ct" (UID: "531c2cfd-8b93-4ec4-88ab-fb4e40de2543") : object "openshift-multus"/"metrics-daemon-secret" not registered
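Note that durationBeforeRetry has doubled from 4s to 8s, matching the capped doubling sketched earlier. The mount keeps failing with the same "not registered" error, which should clear once the kubelet's secret manager starts tracking openshift-multus/metrics-daemon-secret for this pod.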
[node-status cycle repeated at 12:26:06.533, .635, .737, .839 and .941; collapsed]
Dec 05 12:26:06 crc kubenswrapper[4784]: I1205 12:26:06.998212 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct"
Dec 05 12:26:06 crc kubenswrapper[4784]: I1205 12:26:06.998213 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 12:26:06 crc kubenswrapper[4784]: E1205 12:26:06.998365 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543"
Dec 05 12:26:06 crc kubenswrapper[4784]: I1205 12:26:06.998434 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 12:26:06 crc kubenswrapper[4784]: E1205 12:26:06.998494 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 12:26:06 crc kubenswrapper[4784]: E1205 12:26:06.998523 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
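Almost all of this stretch is the same five entries re-logged every ~100 ms. When triaging a log like this, a frequency count keyed on the klog source tag surfaces the spam immediately; a self-contained sketch, assuming the unwrapped log arrives on stdin with one entry per line:

package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
	"sort"
)

func main() {
	// Matches the klog header's "<pid> <file>.go:<line>]" tag, e.g.
	// "4784 kubelet_node_status.go:724]".
	re := regexp.MustCompile(`\d+ ([\w.]+\.go:\d+)\]`)
	counts := map[string]int{}
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 1<<20), 1<<20) // some entries are very long
	for sc.Scan() {
		if m := re.FindStringSubmatch(sc.Text()); m != nil {
			counts[m[1]]++
		}
	}
	tags := make([]string, 0, len(counts))
	for t := range counts {
		tags = append(tags, t)
	}
	sort.Slice(tags, func(i, j int) bool { return counts[tags[i]] > counts[tags[j]] })
	for _, t := range tags {
		fmt.Printf("%6d %s\n", counts[t], t)
	}
}

Run as "go run triage.go < kubelet.log"; kubelet_node_status.go:724 and setters.go:603 dominate the output, while the rare tags (secret.go:188, nestedpendingoperations.go:348, util.go:30, pod_workers.go:1301) are the entries worth reading.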
Has your network provider started?"} Dec 05 12:26:07 crc kubenswrapper[4784]: I1205 12:26:07.145992 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:07 crc kubenswrapper[4784]: I1205 12:26:07.146047 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:07 crc kubenswrapper[4784]: I1205 12:26:07.146056 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:07 crc kubenswrapper[4784]: I1205 12:26:07.146079 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:07 crc kubenswrapper[4784]: I1205 12:26:07.146099 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:07Z","lastTransitionTime":"2025-12-05T12:26:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:07 crc kubenswrapper[4784]: I1205 12:26:07.248054 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:07 crc kubenswrapper[4784]: I1205 12:26:07.248114 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:07 crc kubenswrapper[4784]: I1205 12:26:07.248126 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:07 crc kubenswrapper[4784]: I1205 12:26:07.248143 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:07 crc kubenswrapper[4784]: I1205 12:26:07.248158 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:07Z","lastTransitionTime":"2025-12-05T12:26:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:07 crc kubenswrapper[4784]: I1205 12:26:07.350724 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:07 crc kubenswrapper[4784]: I1205 12:26:07.350765 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:07 crc kubenswrapper[4784]: I1205 12:26:07.350774 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:07 crc kubenswrapper[4784]: I1205 12:26:07.350791 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:07 crc kubenswrapper[4784]: I1205 12:26:07.350803 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:07Z","lastTransitionTime":"2025-12-05T12:26:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:07 crc kubenswrapper[4784]: I1205 12:26:07.452220 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:07 crc kubenswrapper[4784]: I1205 12:26:07.452268 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:07 crc kubenswrapper[4784]: I1205 12:26:07.452276 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:07 crc kubenswrapper[4784]: I1205 12:26:07.452293 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:07 crc kubenswrapper[4784]: I1205 12:26:07.452302 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:07Z","lastTransitionTime":"2025-12-05T12:26:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:07 crc kubenswrapper[4784]: I1205 12:26:07.556733 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:07 crc kubenswrapper[4784]: I1205 12:26:07.557173 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:07 crc kubenswrapper[4784]: I1205 12:26:07.557439 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:07 crc kubenswrapper[4784]: I1205 12:26:07.557617 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:07 crc kubenswrapper[4784]: I1205 12:26:07.557793 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:07Z","lastTransitionTime":"2025-12-05T12:26:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:07 crc kubenswrapper[4784]: I1205 12:26:07.660584 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:07 crc kubenswrapper[4784]: I1205 12:26:07.660664 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:07 crc kubenswrapper[4784]: I1205 12:26:07.660694 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:07 crc kubenswrapper[4784]: I1205 12:26:07.660730 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:07 crc kubenswrapper[4784]: I1205 12:26:07.660755 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:07Z","lastTransitionTime":"2025-12-05T12:26:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:07 crc kubenswrapper[4784]: I1205 12:26:07.762877 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:07 crc kubenswrapper[4784]: I1205 12:26:07.762914 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:07 crc kubenswrapper[4784]: I1205 12:26:07.762923 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:07 crc kubenswrapper[4784]: I1205 12:26:07.762937 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:07 crc kubenswrapper[4784]: I1205 12:26:07.762946 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:07Z","lastTransitionTime":"2025-12-05T12:26:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:07 crc kubenswrapper[4784]: I1205 12:26:07.865995 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:07 crc kubenswrapper[4784]: I1205 12:26:07.866047 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:07 crc kubenswrapper[4784]: I1205 12:26:07.866060 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:07 crc kubenswrapper[4784]: I1205 12:26:07.866081 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:07 crc kubenswrapper[4784]: I1205 12:26:07.866094 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:07Z","lastTransitionTime":"2025-12-05T12:26:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:07 crc kubenswrapper[4784]: I1205 12:26:07.968112 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:07 crc kubenswrapper[4784]: I1205 12:26:07.968206 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:07 crc kubenswrapper[4784]: I1205 12:26:07.968218 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:07 crc kubenswrapper[4784]: I1205 12:26:07.968237 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:07 crc kubenswrapper[4784]: I1205 12:26:07.968248 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:07Z","lastTransitionTime":"2025-12-05T12:26:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:07 crc kubenswrapper[4784]: I1205 12:26:07.998204 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:26:07 crc kubenswrapper[4784]: E1205 12:26:07.998361 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.071162 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.071233 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.071242 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.071256 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.071268 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:08Z","lastTransitionTime":"2025-12-05T12:26:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.174129 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.174907 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.174946 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.174970 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.174986 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:08Z","lastTransitionTime":"2025-12-05T12:26:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.277363 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.277413 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.277424 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.277441 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.277453 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:08Z","lastTransitionTime":"2025-12-05T12:26:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.379458 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.379496 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.379507 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.379534 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.379545 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:08Z","lastTransitionTime":"2025-12-05T12:26:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.482104 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.482150 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.482161 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.482178 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.482205 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:08Z","lastTransitionTime":"2025-12-05T12:26:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.584656 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.584717 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.584733 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.584762 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.584783 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:08Z","lastTransitionTime":"2025-12-05T12:26:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.686292 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.686332 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.686341 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.686357 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.686367 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:08Z","lastTransitionTime":"2025-12-05T12:26:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.788058 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.788097 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.788108 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.788124 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.788135 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:08Z","lastTransitionTime":"2025-12-05T12:26:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.890984 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.891154 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.891167 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.891183 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.891232 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:08Z","lastTransitionTime":"2025-12-05T12:26:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.993014 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.993050 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.993058 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.993074 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.993082 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:08Z","lastTransitionTime":"2025-12-05T12:26:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.998343 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.998479 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:26:08 crc kubenswrapper[4784]: E1205 12:26:08.998559 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:26:08 crc kubenswrapper[4784]: I1205 12:26:08.998575 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:26:08 crc kubenswrapper[4784]: E1205 12:26:08.998722 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543" Dec 05 12:26:08 crc kubenswrapper[4784]: E1205 12:26:08.998990 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:26:09 crc kubenswrapper[4784]: I1205 12:26:09.096597 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:09 crc kubenswrapper[4784]: I1205 12:26:09.096659 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:09 crc kubenswrapper[4784]: I1205 12:26:09.096671 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:09 crc kubenswrapper[4784]: I1205 12:26:09.096693 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:09 crc kubenswrapper[4784]: I1205 12:26:09.096708 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:09Z","lastTransitionTime":"2025-12-05T12:26:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:09 crc kubenswrapper[4784]: I1205 12:26:09.200030 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:09 crc kubenswrapper[4784]: I1205 12:26:09.200079 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:09 crc kubenswrapper[4784]: I1205 12:26:09.200093 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:09 crc kubenswrapper[4784]: I1205 12:26:09.200111 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:09 crc kubenswrapper[4784]: I1205 12:26:09.200125 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:09Z","lastTransitionTime":"2025-12-05T12:26:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:09 crc kubenswrapper[4784]: I1205 12:26:09.307166 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:09 crc kubenswrapper[4784]: I1205 12:26:09.307244 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:09 crc kubenswrapper[4784]: I1205 12:26:09.307255 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:09 crc kubenswrapper[4784]: I1205 12:26:09.307275 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:09 crc kubenswrapper[4784]: I1205 12:26:09.307287 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:09Z","lastTransitionTime":"2025-12-05T12:26:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:09 crc kubenswrapper[4784]: I1205 12:26:09.410167 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:09 crc kubenswrapper[4784]: I1205 12:26:09.410239 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:09 crc kubenswrapper[4784]: I1205 12:26:09.410251 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:09 crc kubenswrapper[4784]: I1205 12:26:09.410268 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:09 crc kubenswrapper[4784]: I1205 12:26:09.410278 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:09Z","lastTransitionTime":"2025-12-05T12:26:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:09 crc kubenswrapper[4784]: I1205 12:26:09.512942 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:09 crc kubenswrapper[4784]: I1205 12:26:09.512981 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:09 crc kubenswrapper[4784]: I1205 12:26:09.512989 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:09 crc kubenswrapper[4784]: I1205 12:26:09.513006 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:09 crc kubenswrapper[4784]: I1205 12:26:09.513016 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:09Z","lastTransitionTime":"2025-12-05T12:26:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:09 crc kubenswrapper[4784]: I1205 12:26:09.615261 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:09 crc kubenswrapper[4784]: I1205 12:26:09.615292 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:09 crc kubenswrapper[4784]: I1205 12:26:09.615300 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:09 crc kubenswrapper[4784]: I1205 12:26:09.615313 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:09 crc kubenswrapper[4784]: I1205 12:26:09.615322 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:09Z","lastTransitionTime":"2025-12-05T12:26:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:09 crc kubenswrapper[4784]: I1205 12:26:09.717731 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:09 crc kubenswrapper[4784]: I1205 12:26:09.717772 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:09 crc kubenswrapper[4784]: I1205 12:26:09.717782 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:09 crc kubenswrapper[4784]: I1205 12:26:09.717800 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:09 crc kubenswrapper[4784]: I1205 12:26:09.717811 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:09Z","lastTransitionTime":"2025-12-05T12:26:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:09 crc kubenswrapper[4784]: I1205 12:26:09.820543 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:09 crc kubenswrapper[4784]: I1205 12:26:09.820592 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:09 crc kubenswrapper[4784]: I1205 12:26:09.820607 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:09 crc kubenswrapper[4784]: I1205 12:26:09.820623 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:09 crc kubenswrapper[4784]: I1205 12:26:09.820634 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:09Z","lastTransitionTime":"2025-12-05T12:26:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:09 crc kubenswrapper[4784]: I1205 12:26:09.923125 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:09 crc kubenswrapper[4784]: I1205 12:26:09.923170 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:09 crc kubenswrapper[4784]: I1205 12:26:09.923181 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:09 crc kubenswrapper[4784]: I1205 12:26:09.923221 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:09 crc kubenswrapper[4784]: I1205 12:26:09.923236 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:09Z","lastTransitionTime":"2025-12-05T12:26:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:09 crc kubenswrapper[4784]: I1205 12:26:09.998366 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:26:09 crc kubenswrapper[4784]: E1205 12:26:09.998516 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.025916 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.025970 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.026009 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.026028 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.026044 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:10Z","lastTransitionTime":"2025-12-05T12:26:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.128897 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.128951 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.128978 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.128998 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.129012 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:10Z","lastTransitionTime":"2025-12-05T12:26:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.231684 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.231737 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.231750 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.231769 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.231781 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:10Z","lastTransitionTime":"2025-12-05T12:26:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.335000 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.335365 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.335376 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.335400 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.335793 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:10Z","lastTransitionTime":"2025-12-05T12:26:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.437774 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.437820 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.437835 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.437851 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.437862 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:10Z","lastTransitionTime":"2025-12-05T12:26:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.539675 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.539721 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.539734 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.539750 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.539761 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:10Z","lastTransitionTime":"2025-12-05T12:26:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.642113 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.642168 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.642182 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.642226 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.642242 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:10Z","lastTransitionTime":"2025-12-05T12:26:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.744732 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.744775 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.744786 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.744801 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.744820 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:10Z","lastTransitionTime":"2025-12-05T12:26:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.802926 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:26:10 crc kubenswrapper[4784]: E1205 12:26:10.803233 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:26:42.803177659 +0000 UTC m=+82.223244474 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.847393 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.847443 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.847456 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.847475 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.847488 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:10Z","lastTransitionTime":"2025-12-05T12:26:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.904092 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.904254 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:26:10 crc kubenswrapper[4784]: E1205 12:26:10.904216 4784 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 12:26:10 crc kubenswrapper[4784]: E1205 12:26:10.904339 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 12:26:42.904325743 +0000 UTC m=+82.324392558 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 12:26:10 crc kubenswrapper[4784]: E1205 12:26:10.904437 4784 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.904274 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:26:10 crc kubenswrapper[4784]: E1205 12:26:10.904505 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 12:26:42.904471298 +0000 UTC m=+82.324538113 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.904529 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:26:10 crc kubenswrapper[4784]: E1205 12:26:10.904622 4784 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 12:26:10 crc kubenswrapper[4784]: E1205 12:26:10.904641 4784 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 12:26:10 crc kubenswrapper[4784]: E1205 12:26:10.904653 4784 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:26:10 crc kubenswrapper[4784]: E1205 12:26:10.904651 4784 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 12:26:10 crc kubenswrapper[4784]: E1205 12:26:10.904685 4784 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 12:26:10 crc kubenswrapper[4784]: E1205 
12:26:10.904690 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 12:26:42.904683035 +0000 UTC m=+82.324749850 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:26:10 crc kubenswrapper[4784]: E1205 12:26:10.904697 4784 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:26:10 crc kubenswrapper[4784]: E1205 12:26:10.904749 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 12:26:42.904731527 +0000 UTC m=+82.324798412 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.949888 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.949937 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.949949 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.949966 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.949980 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:10Z","lastTransitionTime":"2025-12-05T12:26:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.998690 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.998756 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:26:10 crc kubenswrapper[4784]: I1205 12:26:10.998888 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:26:10 crc kubenswrapper[4784]: E1205 12:26:10.999057 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:26:10 crc kubenswrapper[4784]: E1205 12:26:10.999220 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543" Dec 05 12:26:10 crc kubenswrapper[4784]: E1205 12:26:10.999561 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.013329 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
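
Each "Error syncing pod, skipping" entry above traces back to the same root condition: NetworkReady=false because the runtime finds no CNI config under /etc/kubernetes/cni/net.d/ until the network plugin writes one. Below is an illustrative re-creation of that directory check in Python; the real check lives in the container runtime's Go code, and the extension list here is an assumption about what it scans for.

    import os

    CNI_CONF_DIR = "/etc/kubernetes/cni/net.d"        # directory named in the errors above
    CNI_EXTENSIONS = (".conf", ".conflist", ".json")  # assumed set of config file types

    def network_ready(conf_dir: str = CNI_CONF_DIR) -> bool:
        # NetworkReady stays false until at least one CNI config file exists.
        try:
            entries = os.listdir(conf_dir)
        except FileNotFoundError:
            return False
        return any(name.endswith(CNI_EXTENSIONS) for name in entries)

    if not network_ready():
        print(f"NetworkReady=false: no CNI configuration file in {CNI_CONF_DIR}")
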
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d237806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:11Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.026634 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
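
From here on, every status patch fails for the same reason: the kubelet reaches the network-node-identity webhook on 127.0.0.1:9743, but the webhook's serving certificate expired on 2025-08-24, over three months before this boot. A minimal sketch of the validity comparison that Go's x509 verifier is reporting, using the two timestamps quoted verbatim in the error:

    from datetime import datetime

    def parse_utc(stamp: str) -> datetime:
        # fromisoformat() only accepts a trailing 'Z' from Python 3.11 on,
        # so normalise it for older interpreters.
        return datetime.fromisoformat(stamp.replace("Z", "+00:00"))

    not_after = parse_utc("2025-08-24T17:21:41Z")  # expiry quoted in the error
    now = parse_utc("2025-12-05T12:26:11Z")        # "current time" quoted in the error

    if now > not_after:
        # the branch every webhook call in this log is hitting
        print(f"certificate expired {(now - not_after).days} days ago")  # -> 103 days
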
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:11Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.040952 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf86c4db02b1091f887fc7dc73c7e2e7bb8d03e7212fb8bd443a43c4f51e976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87cb87114139d85aeacf41902c2c61f7ccb3cef1f22892820a521390c2a64d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:11Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.051646 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.051687 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.051696 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.051730 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.051740 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:11Z","lastTransitionTime":"2025-12-05T12:26:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.053353 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
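
The patch bodies in these entries are strategic merge patches: the $setElementOrder/conditions directive fixes the ordering of the conditions list, whose elements are merged by their type key rather than replaced wholesale. A stripped-down sketch of those merge semantics (values shortened; this illustrates the format, not the apimachinery implementation):

    import json

    # Conditions as the API server currently stores them (abbreviated).
    current = [
        {"type": "Ready", "status": "True"},
        {"type": "ContainersReady", "status": "True"},
    ]
    # A patch like the ones above: only the changed elements are sent.
    patch = [
        {"type": "Ready", "status": "False", "reason": "ContainersNotReady"},
    ]

    def merge_conditions(current, patch, key="type"):
        # Merge list elements by their merge key instead of replacing the list.
        merged = {c[key]: dict(c) for c in current}
        for p in patch:
            merged.setdefault(p[key], {}).update(p)
        return list(merged.values())

    print(json.dumps(merge_conditions(current, patch), indent=1))
    # "Ready" is updated in place; "ContainersReady" is left untouched.
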
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:11Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.062686 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0851219b-f3dd-4229-b5c5-b4d86a452bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f040ccf64d679f2035148f8391ebcf3fb0e262970a60355616a2d51c5114ddf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7t7rf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2386591cf338cbe93dd94038583c47b3f3e56762d52bf6e99de5eb8e08da02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\
\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7t7rf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pzsqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:11Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.077974 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b4aeb18e84f1b440528e547960f9aed0bf44e53a870af6b06bc85f754e1c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-releas
e-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:51Z\\\",\\\"reason\\\":\\\"Compl
eted\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vpljs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:11Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.090792 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba1e93b1bb1f04a0c36b81190dc5ba99e09cff796f519b55340c78d2d6305a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:11Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.103813 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:11Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.117493 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g5gv5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"759cb09f-42c3-4254-82f8-b5285b61012a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c44657f4f0a70fc2d2f1dcc08f1ca23c5fe94e77d3dbdd1bbb3f36f6471a62d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rpk8f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g5gv5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:11Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.140795 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"291f2a35-7dd5-4af9-87f0-caae4ef75c66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c86ad
4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e440d10f16055ce61279bed179cd6e0517947ef2844bc3c2f1c81e98055bae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e440d10f16055ce61279bed179cd6e0517947ef2844bc3c2f1c81e98055bae\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"8 for removal\\\\nI1205 12:25:58.103230 6226 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1205 12:25:58.103235 6226 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1205 12:25:58.103256 6226 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 12:25:58.103257 6226 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 12:25:58.103265 6226 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1205 12:25:58.103274 6226 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 12:25:58.103282 6226 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 12:25:58.103279 6226 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 12:25:58.103286 6226 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 12:25:58.103334 6226 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 12:25:58.103400 6226 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 12:25:58.103534 6226 factory.go:656] Stopping watch factory\\\\nI1205 12:25:58.103575 6226 ovnkube.go:599] Stopped ovnkube\\\\nI1205 12:25:58.103581 6226 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 12:25:58.103692 6226 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1205 12:25:58.103840 6226 ovnkube.go:137] failed to run ovnkube: [failed to start network 
contr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-fxbpl_openshift-ovn-kubernetes(291f2a35-7dd5-4af9-87f0-caae4ef75c66)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursi
veReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fxbpl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:11Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.151716 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dzwxp" err="failed to patch status 
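
With a dozen of these multi-kilobyte patch dumps in a row, it is quicker to tally the failures than to read them. A small, hypothetical helper for scanning a log like this one; the kubelet.log filename and the counting approach are editorial choices, not anything the log itself prescribes.

    import re
    from collections import Counter

    # Matches the header of each failed status update logged above.
    FAILED_STATUS = re.compile(r'"Failed to update status for pod" pod="([^"]+)"')

    def failed_pods(log_text: str) -> Counter:
        return Counter(FAILED_STATUS.findall(log_text))

    with open("kubelet.log", encoding="utf-8", errors="replace") as fh:
        for pod, count in failed_pods(fh.read()).most_common():
            print(f"{count:3d}  {pod}")
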
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f8364fb-1be8-4baa-aff0-10d4a4e8d614\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f0d98ad3b8357dc9296635ef627f67300f4bbdd8131491552ba43830d1e658\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2xfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dzwxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:11Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.153628 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.153670 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.153681 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.153698 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.153709 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:11Z","lastTransitionTime":"2025-12-05T12:26:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.163653 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ln9ct" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"531c2cfd-8b93-4ec4-88ab-fb4e40de2543\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmtkj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmtkj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:58Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ln9ct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:11Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.176808 4784 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12
baa8405ede002e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 12:25:38.416897 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 12:25:38.417058 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:25:38.422141 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1079216912/tls.crt::/tmp/serving-cert-1079216912/tls.key\\\\\\\"\\\\nI1205 12:25:38.771082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:25:38.774729 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:25:38.774756 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:25:38.774788 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:25:38.774811 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:25:38.780280 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:25:38.780316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780321 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:25:38.780329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:25:38.780332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:25:38.780335 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:25:38.780478 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:25:38.783020 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:11Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.189631 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a95e925e1cf25326e556267b9b43fcff4b12eafe8aeb03b577a01ed8f0d98ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:11Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.198608 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xpw77" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"beba5d67-ad2e-4968-91da-3f451dd2cdc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52618f95fb8efa7ff3baa650dcf8f3373b7d17bc27138fa03e49c68792adf36c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjrpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xpw77\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:11Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.209656 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be412f31-7a36-4811-8914-be8cdc987d08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76ddca8a6cc5af5e792bf0f50c2bc3cfcec5ea40705126799786373ad5d18e8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45f31312230c7d56492145f94ee5dfda841a79b7d35cf4139b4fc09eacf070fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sx8lm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:11Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.256049 4784 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.256103 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.256116 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.256137 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.256150 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:11Z","lastTransitionTime":"2025-12-05T12:26:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.359283 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.359358 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.359380 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.359409 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.359432 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:11Z","lastTransitionTime":"2025-12-05T12:26:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.462370 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.462421 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.462430 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.462445 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.462454 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:11Z","lastTransitionTime":"2025-12-05T12:26:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.565648 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.565692 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.565702 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.565719 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.565730 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:11Z","lastTransitionTime":"2025-12-05T12:26:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.668120 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.668183 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.668233 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.668255 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.668269 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:11Z","lastTransitionTime":"2025-12-05T12:26:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.770421 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.770469 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.770481 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.770497 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.770511 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:11Z","lastTransitionTime":"2025-12-05T12:26:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.872885 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.872927 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.872950 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.872968 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.872979 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:11Z","lastTransitionTime":"2025-12-05T12:26:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.975330 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.975372 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.975381 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.975396 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.975407 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:11Z","lastTransitionTime":"2025-12-05T12:26:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:11 crc kubenswrapper[4784]: I1205 12:26:11.998850 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:26:11 crc kubenswrapper[4784]: E1205 12:26:11.998997 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.078600 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.078659 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.078669 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.078690 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.078703 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:12Z","lastTransitionTime":"2025-12-05T12:26:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.180753 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.180788 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.180805 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.180820 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.180832 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:12Z","lastTransitionTime":"2025-12-05T12:26:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.284302 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.284371 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.284387 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.284408 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.284420 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:12Z","lastTransitionTime":"2025-12-05T12:26:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.387035 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.387083 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.387094 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.387111 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.387122 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:12Z","lastTransitionTime":"2025-12-05T12:26:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.490272 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.490347 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.490373 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.490405 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.490429 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:12Z","lastTransitionTime":"2025-12-05T12:26:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.494857 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.494925 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.494948 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.494973 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.494990 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:12Z","lastTransitionTime":"2025-12-05T12:26:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:12 crc kubenswrapper[4784]: E1205 12:26:12.512690 4784 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"08035136-431a-41d0-879c-bf86d5af7e54\\\",\\\"systemUUID\\\":\\\"aac4a951-c40f-4b5f-a660-6c137757957c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:12Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.517906 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.518005 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.518028 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.518053 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.518072 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:12Z","lastTransitionTime":"2025-12-05T12:26:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:12 crc kubenswrapper[4784]: E1205 12:26:12.536711 4784 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"08035136-431a-41d0-879c-bf86d5af7e54\\\",\\\"systemUUID\\\":\\\"aac4a951-c40f-4b5f-a660-6c137757957c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:12Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.540341 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.540386 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.540403 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.540425 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.540443 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:12Z","lastTransitionTime":"2025-12-05T12:26:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.556690 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.556737 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasNoDiskPressure" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.556746 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.556763 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.556776 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:12Z","lastTransitionTime":"2025-12-05T12:26:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.574148 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.574197 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasNoDiskPressure" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.574211 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.574229 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.574243 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:12Z","lastTransitionTime":"2025-12-05T12:26:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:12 crc kubenswrapper[4784]: E1205 12:26:12.588302 4784 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.592824 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasSufficientMemory" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.592910 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.592929 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.592951 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.592967 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:12Z","lastTransitionTime":"2025-12-05T12:26:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.695381 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.695415 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.695426 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.695450 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.695462 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:12Z","lastTransitionTime":"2025-12-05T12:26:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.798227 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.798287 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.798302 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.798320 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.798328 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:12Z","lastTransitionTime":"2025-12-05T12:26:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.901063 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.901101 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.902182 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.902232 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.902243 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:12Z","lastTransitionTime":"2025-12-05T12:26:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.998685 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.998761 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:26:12 crc kubenswrapper[4784]: I1205 12:26:12.998690 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:26:12 crc kubenswrapper[4784]: E1205 12:26:12.998829 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543" Dec 05 12:26:12 crc kubenswrapper[4784]: E1205 12:26:12.998941 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:26:13 crc kubenswrapper[4784]: E1205 12:26:13.002485 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.004689 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.004758 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.004796 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.004828 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.004852 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:13Z","lastTransitionTime":"2025-12-05T12:26:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.006637 4784 scope.go:117] "RemoveContainer" containerID="96e440d10f16055ce61279bed179cd6e0517947ef2844bc3c2f1c81e98055bae" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.107956 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.108565 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.108640 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.108705 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.108780 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:13Z","lastTransitionTime":"2025-12-05T12:26:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.211299 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.211336 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.211368 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.211387 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.211397 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:13Z","lastTransitionTime":"2025-12-05T12:26:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.315752 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.315826 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.315842 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.315866 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.315887 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:13Z","lastTransitionTime":"2025-12-05T12:26:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.392781 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fxbpl_291f2a35-7dd5-4af9-87f0-caae4ef75c66/ovnkube-controller/1.log" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.396258 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" event={"ID":"291f2a35-7dd5-4af9-87f0-caae4ef75c66","Type":"ContainerStarted","Data":"dff23bc7ad1c9be48890b095dd871b43c01624a3fd5333527671348f2f6621bf"} Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.396796 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.415634 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a4001081
8ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 12:25:38.416897 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 12:25:38.417058 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:25:38.422141 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1079216912/tls.crt::/tmp/serving-cert-1079216912/tls.key\\\\\\\"\\\\nI1205 12:25:38.771082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:25:38.774729 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:25:38.774756 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:25:38.774788 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:25:38.774811 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:25:38.780280 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:25:38.780316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780321 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:25:38.780329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:25:38.780332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:25:38.780335 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:25:38.780478 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:25:38.783020 1 cmd.go:182] pods 
\\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:13Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.418293 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.418321 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.418331 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.418345 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.418354 4784 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:13Z","lastTransitionTime":"2025-12-05T12:26:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.437271 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a95e925e1cf25326e556267b9b43fcff4b12eafe8aeb03b577a01ed8f0d98ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:13Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.451758 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xpw77" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"beba5d67-ad2e-4968-91da-3f451dd2cdc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52618f95fb8efa7ff3baa650dcf8f3373b7d17bc27138fa03e49c68792adf36c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjrpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xpw77\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:13Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.463237 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be412f31-7a36-4811-8914-be8cdc987d08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76ddca8a6cc5af5e792bf0f50c2bc3cfcec5ea40705126799786373ad5d18e8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45f31312230c7d56492145f94ee5dfda841a79b7d35cf4139b4fc09eacf070fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sx8lm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:13Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.476026 4784 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d23
7806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:13Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.487149 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:13Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.498487 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf86c4db02b1091f887fc7dc73c7e2e7bb8d03e7212fb8bd443a43c4f51e976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87cb87114139d85aeacf41902c2c61f7ccb3cef1f22892820a521390c2a64d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:13Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.510694 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:13Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.520770 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.520808 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.520819 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.520836 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.520848 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:13Z","lastTransitionTime":"2025-12-05T12:26:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.523026 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0851219b-f3dd-4229-b5c5-b4d86a452bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f040ccf64d679f2035148f8391ebcf3fb0e262970a60355616a2d51c5114ddf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7t7rf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2386591cf338cbe93dd94038583c47b3f3e56762d52bf6e99de5eb8e08da02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7t7rf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pzsqb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:13Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.538206 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b4aeb18e84f1b440528e547960f9aed0bf44e53a870af6b06bc85f754e1c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-12-05T12:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vpljs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-05T12:26:13Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.551022 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba1e93b1bb1f04a0c36b81190dc5ba99e09cff796f519b55340c78d2d6305a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:13Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.563156 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:13Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.576492 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g5gv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759cb09f-42c3-4254-82f8-b5285b61012a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c44657f4f0a70fc2d2f1dcc08f1ca23c5fe94e77d3dbdd1bbb3f36f6471a62d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cn
i/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rpk8f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g5gv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:13Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.612721 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"291f2a35-7dd5-4af9-87f0-caae4ef75c66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff23bc7ad1c9be48890b095dd871b43c01624a3
fd5333527671348f2f6621bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e440d10f16055ce61279bed179cd6e0517947ef2844bc3c2f1c81e98055bae\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"8 for removal\\\\nI1205 12:25:58.103230 6226 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1205 12:25:58.103235 6226 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1205 12:25:58.103256 6226 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 12:25:58.103257 6226 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 12:25:58.103265 6226 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1205 12:25:58.103274 6226 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 12:25:58.103282 6226 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 12:25:58.103279 6226 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 12:25:58.103286 6226 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 12:25:58.103334 6226 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 12:25:58.103400 6226 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 12:25:58.103534 6226 factory.go:656] Stopping watch factory\\\\nI1205 12:25:58.103575 6226 ovnkube.go:599] Stopped ovnkube\\\\nI1205 12:25:58.103581 6226 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 12:25:58.103692 6226 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1205 12:25:58.103840 6226 ovnkube.go:137] failed to run ovnkube: [failed to start network 
contr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:26:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"
containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fxbpl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:13Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.623813 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.623859 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.623871 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.623888 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.623900 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:13Z","lastTransitionTime":"2025-12-05T12:26:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.638536 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dzwxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f8364fb-1be8-4baa-aff0-10d4a4e8d614\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f0d98ad3b8357dc9296635ef627f67300f4bbdd8131491552ba43830d1e658\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2xfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dzwxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:13Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.649885 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ln9ct" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"531c2cfd-8b93-4ec4-88ab-fb4e40de2543\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmtkj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmtkj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:58Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ln9ct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:13Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.726940 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.726987 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.726999 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.727018 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.727031 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:13Z","lastTransitionTime":"2025-12-05T12:26:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.829081 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.829121 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.829133 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.829154 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.829167 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:13Z","lastTransitionTime":"2025-12-05T12:26:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.931652 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.931689 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.931700 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.931715 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.931726 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:13Z","lastTransitionTime":"2025-12-05T12:26:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:13 crc kubenswrapper[4784]: I1205 12:26:13.998569 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:26:13 crc kubenswrapper[4784]: E1205 12:26:13.998688 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.033551 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.033592 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.033601 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.033614 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.033624 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:14Z","lastTransitionTime":"2025-12-05T12:26:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.136064 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.136098 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.136107 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.136121 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.136129 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:14Z","lastTransitionTime":"2025-12-05T12:26:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.238797 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.239025 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.239117 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.239228 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.239300 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:14Z","lastTransitionTime":"2025-12-05T12:26:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.341451 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.341767 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.341910 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.342039 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.342238 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:14Z","lastTransitionTime":"2025-12-05T12:26:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.400954 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fxbpl_291f2a35-7dd5-4af9-87f0-caae4ef75c66/ovnkube-controller/2.log" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.401616 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fxbpl_291f2a35-7dd5-4af9-87f0-caae4ef75c66/ovnkube-controller/1.log" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.403993 4784 generic.go:334] "Generic (PLEG): container finished" podID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerID="dff23bc7ad1c9be48890b095dd871b43c01624a3fd5333527671348f2f6621bf" exitCode=1 Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.404038 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" event={"ID":"291f2a35-7dd5-4af9-87f0-caae4ef75c66","Type":"ContainerDied","Data":"dff23bc7ad1c9be48890b095dd871b43c01624a3fd5333527671348f2f6621bf"} Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.404075 4784 scope.go:117] "RemoveContainer" containerID="96e440d10f16055ce61279bed179cd6e0517947ef2844bc3c2f1c81e98055bae" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.404681 4784 scope.go:117] "RemoveContainer" containerID="dff23bc7ad1c9be48890b095dd871b43c01624a3fd5333527671348f2f6621bf" Dec 05 12:26:14 crc kubenswrapper[4784]: E1205 12:26:14.404827 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-fxbpl_openshift-ovn-kubernetes(291f2a35-7dd5-4af9-87f0-caae4ef75c66)\"" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.418631 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ln9ct" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"531c2cfd-8b93-4ec4-88ab-fb4e40de2543\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmtkj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmtkj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:58Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ln9ct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:14Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.433666 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba1e93b1bb1f04a0c36b81190dc5ba99e09cff796f519b55340c78d2d6305a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:14Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.445467 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.445809 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.445951 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.446087 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.446261 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:14Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.446267 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:14Z","lastTransitionTime":"2025-12-05T12:26:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.461523 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g5gv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759cb09f-42c3-4254-82f8-b5285b61012a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c44657f4f0a70fc2d2f1dcc08f1ca23c5fe94e77d3dbdd1bbb3f36f6471a62d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rpk8f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g5gv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:14Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.479499 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"291f2a35-7dd5-4af9-87f0-caae4ef75c66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mou
ntPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\
\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff23bc7ad1c9be48890b095dd871b43c01624a3fd5333527671348f2f6621bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e440d10f16055ce61279bed179cd6e0517947ef2844bc3c2f1c81e98055bae\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"8 for removal\\\\nI1205 12:25:58.103230 6226 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1205 12:25:58.103235 6226 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1205 12:25:58.103256 6226 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 12:25:58.103257 6226 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 12:25:58.103265 6226 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1205 12:25:58.103274 6226 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 12:25:58.103282 6226 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 12:25:58.103279 6226 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 12:25:58.103286 6226 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 12:25:58.103334 6226 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 12:25:58.103400 6226 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 12:25:58.103534 6226 factory.go:656] Stopping watch factory\\\\nI1205 12:25:58.103575 6226 ovnkube.go:599] Stopped ovnkube\\\\nI1205 12:25:58.103581 6226 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 12:25:58.103692 6226 metrics.go:553] Stopping metrics server at address 
\\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1205 12:25:58.103840 6226 ovnkube.go:137] failed to run ovnkube: [failed to start network contr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dff23bc7ad1c9be48890b095dd871b43c01624a3fd5333527671348f2f6621bf\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:26:14Z\\\",\\\"message\\\":\\\"g-cert-secret-name:serving-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc00774da5f \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: service-ca-operator,},ClusterIP:10.217.4.40,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.40],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1205 12:26:13.843935 6427 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1205 12:26:13.843976 6427 lb_config.go:1031] Cluster endpoints for openshift-service-ca-operator/metrics for network=default are: map[]\\\\nF1205 12:26:13.844007 6427 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:26:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fxbpl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:14Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.490896 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dzwxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f8364fb-1be8-4baa-aff0-10d4a4e8d614\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f0d98ad3b8357dc9296635ef627f67300f4bbdd8131491552ba43830d1e658\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2xfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.16
8.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dzwxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:14Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.505329 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc
478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 12:25:38.416897 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 12:25:38.417058 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:25:38.422141 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1079216912/tls.crt::/tmp/serving-cert-1079216912/tls.key\\\\\\\"\\\\nI1205 12:25:38.771082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:25:38.774729 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:25:38.774756 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:25:38.774788 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:25:38.774811 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:25:38.780280 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:25:38.780316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780321 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:25:38.780329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:25:38.780332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:25:38.780335 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:25:38.780478 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:25:38.783020 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:14Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.519613 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a95e925e1cf25326e556267b9b43fcff4b12eafe8aeb03b577a01ed8f0d98ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:14Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.531935 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xpw77" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"beba5d67-ad2e-4968-91da-3f451dd2cdc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52618f95fb8efa7ff3baa650dcf8f3373b7d17bc27138fa03e49c68792adf36c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjrpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xpw77\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:14Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.540205 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/531c2cfd-8b93-4ec4-88ab-fb4e40de2543-metrics-certs\") pod \"network-metrics-daemon-ln9ct\" (UID: \"531c2cfd-8b93-4ec4-88ab-fb4e40de2543\") " pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:26:14 crc kubenswrapper[4784]: E1205 12:26:14.540405 4784 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 12:26:14 crc kubenswrapper[4784]: E1205 12:26:14.540472 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/531c2cfd-8b93-4ec4-88ab-fb4e40de2543-metrics-certs podName:531c2cfd-8b93-4ec4-88ab-fb4e40de2543 nodeName:}" failed. No retries permitted until 2025-12-05 12:26:30.540454276 +0000 UTC m=+69.960521091 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/531c2cfd-8b93-4ec4-88ab-fb4e40de2543-metrics-certs") pod "network-metrics-daemon-ln9ct" (UID: "531c2cfd-8b93-4ec4-88ab-fb4e40de2543") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.542995 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be412f31-7a36-4811-8914-be8cdc987d08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76ddca8a6cc5af5e792bf0f50c2bc3cfcec5ea40705126799786373ad5d18e8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45f31312230c7d56492145f94ee5dfda841a79b7d35cf4139b4fc09eacf070fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12
:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sx8lm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:14Z is after 2025-08-24T17:21:41Z"
Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.549636 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.549678 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.549688 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.549705 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.549717 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:14Z","lastTransitionTime":"2025-12-05T12:26:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.554964 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0851219b-f3dd-4229-b5c5-b4d86a452bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f040ccf64d679f2035148f8391ebcf3fb0e262970a60355616a2d51c5114ddf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7t7rf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2386591cf338cbe93dd94038583c47b3f3e56762d52bf6e99de5eb8e08da02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7t7rf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pzsqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:14Z is after 2025-08-24T17:21:41Z" Dec 05 
12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.570448 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d237806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:14Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.582788 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:14Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.595047 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf86c4db02b1091f887fc7dc73c7e2e7bb8d03e7212fb8bd443a43c4f51e976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87cb87114139d85aeacf41902c2c61f7ccb3cef1f22892820a521390c2a64d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:14Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.605522 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:14Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.619934 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b4aeb18e84f1b440528e547960f9aed0bf44e53a870af6b06bc85f754e1c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vpljs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:14Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.652293 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.652327 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.652337 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.652352 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.652364 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:14Z","lastTransitionTime":"2025-12-05T12:26:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.754854 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.754896 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.754904 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.754919 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.754929 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:14Z","lastTransitionTime":"2025-12-05T12:26:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.857412 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.857443 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.857455 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.857469 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.857480 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:14Z","lastTransitionTime":"2025-12-05T12:26:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.960371 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.960433 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.960457 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.960486 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.960508 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:14Z","lastTransitionTime":"2025-12-05T12:26:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.998376 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:26:14 crc kubenswrapper[4784]: E1205 12:26:14.998502 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.998675 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:26:14 crc kubenswrapper[4784]: E1205 12:26:14.998724 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543" Dec 05 12:26:14 crc kubenswrapper[4784]: I1205 12:26:14.999085 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:26:14 crc kubenswrapper[4784]: E1205 12:26:14.999137 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.062910 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.062963 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.062975 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.062992 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.063003 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:15Z","lastTransitionTime":"2025-12-05T12:26:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.164905 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.164944 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.164953 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.164968 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.164978 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:15Z","lastTransitionTime":"2025-12-05T12:26:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.268158 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.268210 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.268219 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.268234 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.268243 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:15Z","lastTransitionTime":"2025-12-05T12:26:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.370530 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.370582 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.370592 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.370612 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.370624 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:15Z","lastTransitionTime":"2025-12-05T12:26:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.410708 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fxbpl_291f2a35-7dd5-4af9-87f0-caae4ef75c66/ovnkube-controller/2.log" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.417016 4784 scope.go:117] "RemoveContainer" containerID="dff23bc7ad1c9be48890b095dd871b43c01624a3fd5333527671348f2f6621bf" Dec 05 12:26:15 crc kubenswrapper[4784]: E1205 12:26:15.417405 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-fxbpl_openshift-ovn-kubernetes(291f2a35-7dd5-4af9-87f0-caae4ef75c66)\"" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.434384 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":
\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d237806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:15Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.448516 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:15Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.463894 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf86c4db02b1091f887fc7dc73c7e2e7bb8d03e7212fb8bd443a43c4f51e976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87cb87114139d85aeacf41902c2c61f7ccb3cef1f22892820a521390c2a64d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:15Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.473805 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.473863 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.473874 4784 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.473895 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.473909 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:15Z","lastTransitionTime":"2025-12-05T12:26:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.481771 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:15Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.493983 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0851219b-f3dd-4229-b5c5-b4d86a452bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f040ccf64d679f2035148f8391ebcf3fb0e262970a60355616a2d51c5114ddf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7t7rf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2386591cf338cbe93dd94038583c47b3f3e56762d52bf6e99de5eb8e08da02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\
\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7t7rf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pzsqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:15Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.514998 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b4aeb18e84f1b440528e547960f9aed0bf44e53a870af6b06bc85f754e1c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-releas
e-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:51Z\\\",\\\"reason\\\":\\\"Compl
eted\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vpljs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:15Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.535973 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba1e93b1bb1f04a0c36b81190dc5ba99e09cff796f519b55340c78d2d6305a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:15Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.555495 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:15Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.571392 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g5gv5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"759cb09f-42c3-4254-82f8-b5285b61012a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c44657f4f0a70fc2d2f1dcc08f1ca23c5fe94e77d3dbdd1bbb3f36f6471a62d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rpk8f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g5gv5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:15Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.576864 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.576922 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.576933 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.576959 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.576972 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:15Z","lastTransitionTime":"2025-12-05T12:26:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.592423 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"291f2a35-7dd5-4af9-87f0-caae4ef75c66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff23bc7ad1c9be48890b095dd871b43c01624a3
fd5333527671348f2f6621bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dff23bc7ad1c9be48890b095dd871b43c01624a3fd5333527671348f2f6621bf\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:26:14Z\\\",\\\"message\\\":\\\"g-cert-secret-name:serving-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc00774da5f \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: service-ca-operator,},ClusterIP:10.217.4.40,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.40],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1205 12:26:13.843935 6427 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1205 12:26:13.843976 6427 lb_config.go:1031] Cluster endpoints for openshift-service-ca-operator/metrics for network=default are: map[]\\\\nF1205 12:26:13.844007 6427 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:26:13Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-fxbpl_openshift-ovn-kubernetes(291f2a35-7dd5-4af9-87f0-caae4ef75c66)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fxbpl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:15Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.604536 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dzwxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f8364fb-1be8-4baa-aff0-10d4a4e8d614\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f0d98ad3b8357dc9296635ef627f67300f4bbdd8131491552ba43830d1e658\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2xfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dzwxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:15Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.617589 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ln9ct" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"531c2cfd-8b93-4ec4-88ab-fb4e40de2543\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmtkj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmtkj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:58Z\\\"}}\" for pod 
\"openshift-multus\"/\"network-metrics-daemon-ln9ct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:15Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.631249 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\
\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 12:25:38.416897 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 12:25:38.417058 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:25:38.422141 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1079216912/tls.crt::/tmp/serving-cert-1079216912/tls.key\\\\\\\"\\\\nI1205 12:25:38.771082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:25:38.774729 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:25:38.774756 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:25:38.774788 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:25:38.774811 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:25:38.780280 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:25:38.780316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780321 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:25:38.780329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:25:38.780332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:25:38.780335 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:25:38.780478 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:25:38.783020 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:15Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.642539 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a95e925e1cf25326e556267b9b43fcff4b12eafe8aeb03b577a01ed8f0d98ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:15Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.650645 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xpw77" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"beba5d67-ad2e-4968-91da-3f451dd2cdc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52618f95fb8efa7ff3baa650dcf8f3373b7d17bc27138fa03e49c68792adf36c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjrpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xpw77\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:15Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.660860 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be412f31-7a36-4811-8914-be8cdc987d08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76ddca8a6cc5af5e792bf0f50c2bc3cfcec5ea40705126799786373ad5d18e8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45f31312230c7d56492145f94ee5dfda841a79b7d35cf4139b4fc09eacf070fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sx8lm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:15Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.679442 4784 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.679504 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.679520 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.679548 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.679572 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:15Z","lastTransitionTime":"2025-12-05T12:26:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.781864 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.781915 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.781927 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.781946 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.781958 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:15Z","lastTransitionTime":"2025-12-05T12:26:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.883997 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.884036 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.884045 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.884062 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.884072 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:15Z","lastTransitionTime":"2025-12-05T12:26:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.986755 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.986797 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.986807 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.986828 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.986842 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:15Z","lastTransitionTime":"2025-12-05T12:26:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:15 crc kubenswrapper[4784]: I1205 12:26:15.998246 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:26:15 crc kubenswrapper[4784]: E1205 12:26:15.998431 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.090594 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.090683 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.090712 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.090746 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.090767 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:16Z","lastTransitionTime":"2025-12-05T12:26:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.193619 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.193685 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.193702 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.193725 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.193743 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:16Z","lastTransitionTime":"2025-12-05T12:26:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.297041 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.297085 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.297095 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.297114 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.297128 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:16Z","lastTransitionTime":"2025-12-05T12:26:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.350947 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.364461 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.367599 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ln9ct" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"531c2cfd-8b93-4ec4-88ab-fb4e40de2543\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmtkj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmtkj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:58Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ln9ct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:16Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.381847 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba1e93b1bb1f04a0c36b81190dc5ba99e09cff796f519b55340c78d2d6305a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:16Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.397571 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:16Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.399151 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.399180 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.399204 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.399216 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.399225 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:16Z","lastTransitionTime":"2025-12-05T12:26:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.415009 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g5gv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759cb09f-42c3-4254-82f8-b5285b61012a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c44657f4f0a70fc2d2f1dcc08f1ca23c5fe94e77d3dbdd1bbb3f36f6471a62d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rpk8f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g5gv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:16Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.441164 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"291f2a35-7dd5-4af9-87f0-caae4ef75c66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mou
ntPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\
\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff23bc7ad1c9be48890b095dd871b43c01624a3fd5333527671348f2f6621bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dff23bc7ad1c9be48890b095dd871b43c01624a3fd5333527671348f2f6621bf\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:26:14Z\\\",\\\"message\\\":\\\"g-cert-secret-name:serving-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc00774da5f \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: service-ca-operator,},ClusterIP:10.217.4.40,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.40],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1205 12:26:13.843935 6427 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1205 12:26:13.843976 6427 lb_config.go:1031] Cluster endpoints for openshift-service-ca-operator/metrics for network=default are: map[]\\\\nF1205 12:26:13.844007 6427 ovnkube.go:137] failed to run 
ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:26:13Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-fxbpl_openshift-ovn-kubernetes(291f2a35-7dd5-4af9-87f0-caae4ef75c66)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernet
es.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fxbpl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:16Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.453392 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dzwxp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f8364fb-1be8-4baa-aff0-10d4a4e8d614\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f0d98ad3b8357dc9296635ef627f67300f4bbdd8131491552ba43830d1e658\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2xfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dzwxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:16Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.467794 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 12:25:38.416897 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 12:25:38.417058 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:25:38.422141 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1079216912/tls.crt::/tmp/serving-cert-1079216912/tls.key\\\\\\\"\\\\nI1205 12:25:38.771082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:25:38.774729 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:25:38.774756 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:25:38.774788 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:25:38.774811 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:25:38.780280 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:25:38.780316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780321 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:25:38.780329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:25:38.780332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:25:38.780335 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:25:38.780478 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:25:38.783020 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:16Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.478672 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a95e925e1cf25326e556267b9b43fcff4b12eafe8aeb03b577a01ed8f0d98ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:16Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.489373 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xpw77" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"beba5d67-ad2e-4968-91da-3f451dd2cdc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52618f95fb8efa7ff3baa650dcf8f3373b7d17bc27138fa03e49c68792adf36c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjrpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xpw77\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:16Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.501574 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be412f31-7a36-4811-8914-be8cdc987d08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76ddca8a6cc5af5e792bf0f50c2bc3cfcec5ea40705126799786373ad5d18e8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45f31312230c7d56492145f94ee5dfda841a79b7d35cf4139b4fc09eacf070fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sx8lm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:16Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.502663 4784 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.502712 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.502722 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.502741 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.502752 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:16Z","lastTransitionTime":"2025-12-05T12:26:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.516008 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0851219b-f3dd-4229-b5c5-b4d86a452bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f040ccf64d679f2035148f8391ebcf3fb0e262970a60355616a2d51c5114ddf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7t7rf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2386591cf338cbe93dd94038583c47b3f3e56762d52bf6e99de5eb8e08da02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha2
56:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7t7rf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pzsqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:16Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.528872 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee12
20d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d237806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:16Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.541261 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:16Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.553106 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf86c4db02b1091f887fc7dc73c7e2e7bb8d03e7212fb8bd443a43c4f51e976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87cb87114139d85aeacf41902c2c61f7ccb3cef1f22892820a521390c2a64d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:16Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.567501 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:16Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.579668 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b4aeb18e84f1b440528e547960f9aed0bf44e53a870af6b06bc85f754e1c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vpljs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:16Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.605353 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.605398 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:16 crc 
kubenswrapper[4784]: I1205 12:26:16.605408 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.605422 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.605432 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:16Z","lastTransitionTime":"2025-12-05T12:26:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.708208 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.708271 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.708283 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.708306 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.708322 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:16Z","lastTransitionTime":"2025-12-05T12:26:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.812675 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.812751 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.812772 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.812799 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.812814 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:16Z","lastTransitionTime":"2025-12-05T12:26:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.916072 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.916153 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.916165 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.916215 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.916232 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:16Z","lastTransitionTime":"2025-12-05T12:26:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.998766 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.998853 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:26:16 crc kubenswrapper[4784]: I1205 12:26:16.998863 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:26:16 crc kubenswrapper[4784]: E1205 12:26:16.998979 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:26:16 crc kubenswrapper[4784]: E1205 12:26:16.999312 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543" Dec 05 12:26:16 crc kubenswrapper[4784]: E1205 12:26:16.999507 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.019445 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.019507 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.019521 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.019544 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.019556 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:17Z","lastTransitionTime":"2025-12-05T12:26:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.123278 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.123371 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.123395 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.123423 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.123444 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:17Z","lastTransitionTime":"2025-12-05T12:26:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.227155 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.227269 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.227294 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.227324 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.227344 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:17Z","lastTransitionTime":"2025-12-05T12:26:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.337405 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.337459 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.337480 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.337508 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.337523 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:17Z","lastTransitionTime":"2025-12-05T12:26:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.440690 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.440765 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.440777 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.440822 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.440869 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:17Z","lastTransitionTime":"2025-12-05T12:26:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.543675 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.543726 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.543735 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.543757 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.543767 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:17Z","lastTransitionTime":"2025-12-05T12:26:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.647647 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.647705 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.647718 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.647736 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.647749 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:17Z","lastTransitionTime":"2025-12-05T12:26:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.750575 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.750643 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.750661 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.750687 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.750732 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:17Z","lastTransitionTime":"2025-12-05T12:26:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.853440 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.853502 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.853519 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.853541 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.853554 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:17Z","lastTransitionTime":"2025-12-05T12:26:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.956287 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.956324 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.956333 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.956347 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.956357 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:17Z","lastTransitionTime":"2025-12-05T12:26:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:17 crc kubenswrapper[4784]: I1205 12:26:17.998314 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:26:17 crc kubenswrapper[4784]: E1205 12:26:17.998441 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.060237 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.060308 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.060319 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.060347 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.060371 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:18Z","lastTransitionTime":"2025-12-05T12:26:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.163571 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.163634 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.163648 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.163674 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.163688 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:18Z","lastTransitionTime":"2025-12-05T12:26:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.266940 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.267001 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.267023 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.267046 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.267059 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:18Z","lastTransitionTime":"2025-12-05T12:26:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.370143 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.370211 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.370223 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.370239 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.370253 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:18Z","lastTransitionTime":"2025-12-05T12:26:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.473875 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.473984 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.474018 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.474043 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.474054 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:18Z","lastTransitionTime":"2025-12-05T12:26:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.577360 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.577421 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.577431 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.577453 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.577466 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:18Z","lastTransitionTime":"2025-12-05T12:26:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.680054 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.680141 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.680155 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.680179 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.680246 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:18Z","lastTransitionTime":"2025-12-05T12:26:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.782572 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.782607 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.782617 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.782631 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.782639 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:18Z","lastTransitionTime":"2025-12-05T12:26:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.885173 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.885225 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.885233 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.885248 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.885257 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:18Z","lastTransitionTime":"2025-12-05T12:26:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.987802 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.987856 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.987868 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.987887 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.987904 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:18Z","lastTransitionTime":"2025-12-05T12:26:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.998212 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.998224 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:26:18 crc kubenswrapper[4784]: I1205 12:26:18.998270 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:26:18 crc kubenswrapper[4784]: E1205 12:26:18.998309 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:26:18 crc kubenswrapper[4784]: E1205 12:26:18.998373 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543" Dec 05 12:26:18 crc kubenswrapper[4784]: E1205 12:26:18.998567 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:26:19 crc kubenswrapper[4784]: I1205 12:26:19.090628 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:19 crc kubenswrapper[4784]: I1205 12:26:19.090685 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:19 crc kubenswrapper[4784]: I1205 12:26:19.090696 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:19 crc kubenswrapper[4784]: I1205 12:26:19.090719 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:19 crc kubenswrapper[4784]: I1205 12:26:19.090731 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:19Z","lastTransitionTime":"2025-12-05T12:26:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:19 crc kubenswrapper[4784]: I1205 12:26:19.193908 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:19 crc kubenswrapper[4784]: I1205 12:26:19.193967 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:19 crc kubenswrapper[4784]: I1205 12:26:19.193981 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:19 crc kubenswrapper[4784]: I1205 12:26:19.194004 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:19 crc kubenswrapper[4784]: I1205 12:26:19.194018 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:19Z","lastTransitionTime":"2025-12-05T12:26:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:19 crc kubenswrapper[4784]: I1205 12:26:19.300277 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:19 crc kubenswrapper[4784]: I1205 12:26:19.300319 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:19 crc kubenswrapper[4784]: I1205 12:26:19.300328 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:19 crc kubenswrapper[4784]: I1205 12:26:19.300347 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:19 crc kubenswrapper[4784]: I1205 12:26:19.300361 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:19Z","lastTransitionTime":"2025-12-05T12:26:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:19 crc kubenswrapper[4784]: I1205 12:26:19.403708 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:19 crc kubenswrapper[4784]: I1205 12:26:19.403775 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:19 crc kubenswrapper[4784]: I1205 12:26:19.403789 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:19 crc kubenswrapper[4784]: I1205 12:26:19.403852 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:19 crc kubenswrapper[4784]: I1205 12:26:19.403865 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:19Z","lastTransitionTime":"2025-12-05T12:26:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:19 crc kubenswrapper[4784]: I1205 12:26:19.506329 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:19 crc kubenswrapper[4784]: I1205 12:26:19.506403 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:19 crc kubenswrapper[4784]: I1205 12:26:19.506414 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:19 crc kubenswrapper[4784]: I1205 12:26:19.506434 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:19 crc kubenswrapper[4784]: I1205 12:26:19.506449 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:19Z","lastTransitionTime":"2025-12-05T12:26:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:19 crc kubenswrapper[4784]: I1205 12:26:19.609911 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:19 crc kubenswrapper[4784]: I1205 12:26:19.609959 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:19 crc kubenswrapper[4784]: I1205 12:26:19.609969 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:19 crc kubenswrapper[4784]: I1205 12:26:19.609988 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:19 crc kubenswrapper[4784]: I1205 12:26:19.610000 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:19Z","lastTransitionTime":"2025-12-05T12:26:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:19 crc kubenswrapper[4784]: I1205 12:26:19.713387 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:19 crc kubenswrapper[4784]: I1205 12:26:19.713477 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:19 crc kubenswrapper[4784]: I1205 12:26:19.713490 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:19 crc kubenswrapper[4784]: I1205 12:26:19.713516 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:19 crc kubenswrapper[4784]: I1205 12:26:19.713532 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:19Z","lastTransitionTime":"2025-12-05T12:26:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:19 crc kubenswrapper[4784]: I1205 12:26:19.816745 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:19 crc kubenswrapper[4784]: I1205 12:26:19.816819 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:19 crc kubenswrapper[4784]: I1205 12:26:19.816830 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:19 crc kubenswrapper[4784]: I1205 12:26:19.816850 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:19 crc kubenswrapper[4784]: I1205 12:26:19.816865 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:19Z","lastTransitionTime":"2025-12-05T12:26:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:19 crc kubenswrapper[4784]: I1205 12:26:19.920335 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:19 crc kubenswrapper[4784]: I1205 12:26:19.920386 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:19 crc kubenswrapper[4784]: I1205 12:26:19.920399 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:19 crc kubenswrapper[4784]: I1205 12:26:19.920423 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:19 crc kubenswrapper[4784]: I1205 12:26:19.920441 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:19Z","lastTransitionTime":"2025-12-05T12:26:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:19 crc kubenswrapper[4784]: I1205 12:26:19.998548 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:26:19 crc kubenswrapper[4784]: E1205 12:26:19.998754 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.024182 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.024267 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.024279 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.024297 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.024309 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:20Z","lastTransitionTime":"2025-12-05T12:26:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.127160 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.127225 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.127237 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.127253 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.127264 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:20Z","lastTransitionTime":"2025-12-05T12:26:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.230328 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.230390 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.230400 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.230465 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.230482 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:20Z","lastTransitionTime":"2025-12-05T12:26:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.333763 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.333837 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.333852 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.333880 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.333898 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:20Z","lastTransitionTime":"2025-12-05T12:26:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.436287 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.436354 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.436372 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.436395 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.436406 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:20Z","lastTransitionTime":"2025-12-05T12:26:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.540078 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.540118 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.540132 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.540151 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.540163 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:20Z","lastTransitionTime":"2025-12-05T12:26:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.642883 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.642936 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.642949 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.642969 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.642984 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:20Z","lastTransitionTime":"2025-12-05T12:26:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.746018 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.746097 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.746110 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.746134 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.746153 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:20Z","lastTransitionTime":"2025-12-05T12:26:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.849104 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.849220 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.849235 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.849264 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.849283 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:20Z","lastTransitionTime":"2025-12-05T12:26:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.951580 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.951645 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.951663 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.951690 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.951709 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:20Z","lastTransitionTime":"2025-12-05T12:26:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.998068 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.998091 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:26:20 crc kubenswrapper[4784]: E1205 12:26:20.998279 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:26:20 crc kubenswrapper[4784]: E1205 12:26:20.998354 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543" Dec 05 12:26:20 crc kubenswrapper[4784]: I1205 12:26:20.998912 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:26:20 crc kubenswrapper[4784]: E1205 12:26:20.998990 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.012985 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf86c4db02b1091f887fc7dc73c7e2e7bb8d03e7212fb8bd443a43c4f51e976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87cb87114139d85aeacf41902c2c61f7ccb3cef1f22892820a521390c2a64d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:21Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.027437 4784 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:21Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.038497 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0851219b-f3dd-4229-b5c5-b4d86a452bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f040ccf64d679f2035148f8391ebcf3fb0e262970a60355616a2d51c5114ddf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7t7rf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2386591cf338cbe93dd94038583c47b3f3e56762d52bf6e99de5eb8e08da02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7t7rf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pzsqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:21Z is after 2025-08-24T17:21:41Z" Dec 05 
12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.054016 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d237806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:21Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.055587 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.055641 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.055658 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.055686 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.055703 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:21Z","lastTransitionTime":"2025-12-05T12:26:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.069998 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f51b46aa-f46b-4de1-bbbc-b23acb571394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3edce4b8b954f99066449c8dd5566695f300fc65c85ba3c90827ea380c87cf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6534b0ccbe3164a1bab663c746b201b5aef84ca859991c89cc3a7a84693f24a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a248634eb57b9ae01ffea846c57482ea060fcbd6281c5404bf96ab0706ee3f2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd6a4685a8454beb00ca78c095040c255785f695c1645ce1142a835f8d01fac9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd6a4685a8454beb00ca78c095040c255785f695c1645ce1142a835f8d01fac9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:21Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.083590 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:21Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.098481 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b4aeb18e84f1b440528e547960f9aed0bf44e53a870af6b06bc85f754e1c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vpljs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:21Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.125029 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"291f2a35-7dd5-4af9-87f0-caae4ef75c66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff23bc7ad1c9be48890b095dd871b43c01624a3
fd5333527671348f2f6621bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dff23bc7ad1c9be48890b095dd871b43c01624a3fd5333527671348f2f6621bf\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:26:14Z\\\",\\\"message\\\":\\\"g-cert-secret-name:serving-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc00774da5f \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: service-ca-operator,},ClusterIP:10.217.4.40,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.40],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1205 12:26:13.843935 6427 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1205 12:26:13.843976 6427 lb_config.go:1031] Cluster endpoints for openshift-service-ca-operator/metrics for network=default are: map[]\\\\nF1205 12:26:13.844007 6427 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:26:13Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-fxbpl_openshift-ovn-kubernetes(291f2a35-7dd5-4af9-87f0-caae4ef75c66)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fxbpl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:21Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.136537 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dzwxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f8364fb-1be8-4baa-aff0-10d4a4e8d614\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f0d98ad3b8357dc9296635ef627f67300f4bbdd8131491552ba43830d1e658\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2xfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dzwxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:21Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.149342 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ln9ct" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"531c2cfd-8b93-4ec4-88ab-fb4e40de2543\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmtkj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmtkj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:58Z\\\"}}\" for pod 
\"openshift-multus\"/\"network-metrics-daemon-ln9ct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:21Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.158231 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.158274 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.158285 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.158305 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.158316 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:21Z","lastTransitionTime":"2025-12-05T12:26:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.167258 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba1e93b1bb1f04a0c36b81190dc5ba99e09cff796f519b55340c78d2d6305a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:21Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.181254 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:21Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.195725 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g5gv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759cb09f-42c3-4254-82f8-b5285b61012a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c44657f4f0a70fc2d2f1dcc08f1ca23c5fe94e77d3dbdd1bbb3f36f6471a62d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rpk8f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g5gv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:21Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.208244 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xpw77" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"beba5d67-ad2e-4968-91da-3f451dd2cdc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52618f95fb8efa7ff3baa650dcf8f3373b7d17bc27138fa03e49c68792adf36c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjrpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\
\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xpw77\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:21Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.221714 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be412f31-7a36-4811-8914-be8cdc987d08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76ddca8a6cc5af5e792bf0f50c2bc3cfcec5ea40705126799786373ad5d18e8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45f31312230c7d56492145f94ee5dfda841a79b7d35cf4139b4fc09eacf070fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sx8lm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:21Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.235226 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f894
5c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 12:25:38.416897 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 12:25:38.417058 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:25:38.422141 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1079216912/tls.crt::/tmp/serving-cert-1079216912/tls.key\\\\\\\"\\\\nI1205 12:25:38.771082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:25:38.774729 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:25:38.774756 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:25:38.774788 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:25:38.774811 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:25:38.780280 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:25:38.780316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780321 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:25:38.780329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:25:38.780332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:25:38.780335 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:25:38.780478 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:25:38.783020 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:21Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.248318 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a95e925e1cf25326e556267b9b43fcff4b12eafe8aeb03b577a01ed8f0d98ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:21Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.260290 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.260337 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.260349 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.260369 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.260382 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:21Z","lastTransitionTime":"2025-12-05T12:26:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.363442 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.363783 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.363876 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.363970 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.364052 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:21Z","lastTransitionTime":"2025-12-05T12:26:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.466212 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.466266 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.466279 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.466296 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.466305 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:21Z","lastTransitionTime":"2025-12-05T12:26:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.569853 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.570124 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.570228 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.570314 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.570389 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:21Z","lastTransitionTime":"2025-12-05T12:26:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.672867 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.672918 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.672927 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.672945 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.672954 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:21Z","lastTransitionTime":"2025-12-05T12:26:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.776044 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.776613 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.776705 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.776802 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.776894 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:21Z","lastTransitionTime":"2025-12-05T12:26:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.879111 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.879417 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.879486 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.879565 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.879640 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:21Z","lastTransitionTime":"2025-12-05T12:26:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.981401 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.981460 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.981473 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.981492 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.981506 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:21Z","lastTransitionTime":"2025-12-05T12:26:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:21 crc kubenswrapper[4784]: I1205 12:26:21.997955 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:26:21 crc kubenswrapper[4784]: E1205 12:26:21.998538 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.084488 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.085054 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.085303 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.085473 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.085615 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:22Z","lastTransitionTime":"2025-12-05T12:26:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.190253 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.190330 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.190340 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.190362 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.190372 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:22Z","lastTransitionTime":"2025-12-05T12:26:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.292828 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.292877 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.292886 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.292901 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.292914 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:22Z","lastTransitionTime":"2025-12-05T12:26:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.395917 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.395979 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.395998 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.396016 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.396028 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:22Z","lastTransitionTime":"2025-12-05T12:26:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.499442 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.499486 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.499494 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.499512 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.499526 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:22Z","lastTransitionTime":"2025-12-05T12:26:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.602632 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.602681 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.602690 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.602705 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.602719 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:22Z","lastTransitionTime":"2025-12-05T12:26:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.704470 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.704509 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.704518 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.704532 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.704540 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:22Z","lastTransitionTime":"2025-12-05T12:26:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.807391 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.807433 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.807442 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.807456 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.807467 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:22Z","lastTransitionTime":"2025-12-05T12:26:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.910033 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.910091 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.910099 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.910112 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.910121 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:22Z","lastTransitionTime":"2025-12-05T12:26:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.936307 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.936375 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.936399 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.936429 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.936452 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:22Z","lastTransitionTime":"2025-12-05T12:26:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:22 crc kubenswrapper[4784]: E1205 12:26:22.964466 4784 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"08035136-431a-41d0-879c-bf86d5af7e54\\\",\\\"systemUUID\\\":\\\"aac4a951-c40f-4b5f-a660-6c137757957c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:22Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.970993 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.971081 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.971098 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.971126 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.971145 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:22Z","lastTransitionTime":"2025-12-05T12:26:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:22 crc kubenswrapper[4784]: E1205 12:26:22.988004 4784 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[ ...images list omitted; byte-identical to the first \"failed to patch status\" entry above... ],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"08035136-431a-41d0-879c-bf86d5af7e54\\\",\\\"systemUUID\\\":\\\"aac4a951-c40f-4b5f-a660-6c137757957c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:22Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.997771 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.997814 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasNoDiskPressure" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.997825 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.997843 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.997856 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:22Z","lastTransitionTime":"2025-12-05T12:26:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.997916 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.997911 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:26:22 crc kubenswrapper[4784]: E1205 12:26:22.998011 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:26:22 crc kubenswrapper[4784]: I1205 12:26:22.998050 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:26:22 crc kubenswrapper[4784]: E1205 12:26:22.998079 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543" Dec 05 12:26:22 crc kubenswrapper[4784]: E1205 12:26:22.998129 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:26:23 crc kubenswrapper[4784]: E1205 12:26:23.012616 4784 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[ ...images list omitted; byte-identical to the first \"failed to patch status\" entry above... ],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"08035136-431a-41d0-879c-bf86d5af7e54\\\",\\\"systemUUID\\\":\\\"aac4a951-c40f-4b5f-a660-6c137757957c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:23Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:23 crc kubenswrapper[4784]: I1205 12:26:23.017140 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:23 crc kubenswrapper[4784]: I1205 12:26:23.017178 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasNoDiskPressure" Dec 05 12:26:23 crc kubenswrapper[4784]: I1205 12:26:23.017207 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:23 crc kubenswrapper[4784]: I1205 12:26:23.017224 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:23 crc kubenswrapper[4784]: I1205 12:26:23.017234 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:23Z","lastTransitionTime":"2025-12-05T12:26:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:23 crc kubenswrapper[4784]: E1205 12:26:23.029024 4784 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[ ...images list omitted; byte-identical to the first \"failed to patch status\" entry above... ],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"08035136-431a-41d0-879c-bf86d5af7e54\\\",\\\"systemUUID\\\":\\\"aac4a951-c40f-4b5f-a660-6c137757957c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:23Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:23 crc kubenswrapper[4784]: I1205 12:26:23.033547 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:23 crc kubenswrapper[4784]: I1205 12:26:23.033579 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasNoDiskPressure" Dec 05 12:26:23 crc kubenswrapper[4784]: I1205 12:26:23.033591 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:23 crc kubenswrapper[4784]: I1205 12:26:23.033610 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:23 crc kubenswrapper[4784]: I1205 12:26:23.033621 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:23Z","lastTransitionTime":"2025-12-05T12:26:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:23 crc kubenswrapper[4784]: E1205 12:26:23.047701 4784 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[ ...images list omitted; byte-identical to the first \"failed to patch status\" entry above... ],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"08035136-431a-41d0-879c-bf86d5af7e54\\\",\\\"systemUUID\\\":\\\"aac4a951-c40f-4b5f-a660-6c137757957c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:23Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:23 crc kubenswrapper[4784]: E1205 12:26:23.047878 4784 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 12:26:23 crc kubenswrapper[4784]: I1205 12:26:23.050646 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasSufficientMemory" Dec 05 12:26:23 crc kubenswrapper[4784]: I1205 12:26:23.050672 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:23 crc kubenswrapper[4784]: I1205 12:26:23.050681 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:23 crc kubenswrapper[4784]: I1205 12:26:23.050698 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:23 crc kubenswrapper[4784]: I1205 12:26:23.050710 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:23Z","lastTransitionTime":"2025-12-05T12:26:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:23 crc kubenswrapper[4784]: I1205 12:26:23.153020 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:23 crc kubenswrapper[4784]: I1205 12:26:23.153056 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:23 crc kubenswrapper[4784]: I1205 12:26:23.153065 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:23 crc kubenswrapper[4784]: I1205 12:26:23.153084 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:23 crc kubenswrapper[4784]: I1205 12:26:23.153093 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:23Z","lastTransitionTime":"2025-12-05T12:26:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:23 crc kubenswrapper[4784]: I1205 12:26:23.255360 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:23 crc kubenswrapper[4784]: I1205 12:26:23.255403 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:23 crc kubenswrapper[4784]: I1205 12:26:23.255414 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:23 crc kubenswrapper[4784]: I1205 12:26:23.255431 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:23 crc kubenswrapper[4784]: I1205 12:26:23.255443 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:23Z","lastTransitionTime":"2025-12-05T12:26:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 05 12:26:23 crc kubenswrapper[4784]: I1205 12:26:23.998063 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 12:26:23 crc kubenswrapper[4784]: E1205 12:26:23.998225 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 12:26:24 crc kubenswrapper[4784]: I1205 12:26:24.998932 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 12:26:24 crc kubenswrapper[4784]: I1205 12:26:24.998932 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct"
Dec 05 12:26:24 crc kubenswrapper[4784]: I1205 12:26:24.999016 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 12:26:24 crc kubenswrapper[4784]: E1205 12:26:24.999586 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 12:26:24 crc kubenswrapper[4784]: E1205 12:26:24.999672 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 12:26:24 crc kubenswrapper[4784]: E1205 12:26:24.999801 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543"
Dec 05 12:26:25 crc kubenswrapper[4784]: I1205 12:26:25.012640 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"]
Dec 05 12:26:27 crc kubenswrapper[4784]: I1205 12:26:27.998546 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 12:26:27 crc kubenswrapper[4784]: E1205 12:26:27.998697 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 12:26:27 crc kubenswrapper[4784]: I1205 12:26:27.999364 4784 scope.go:117] "RemoveContainer" containerID="dff23bc7ad1c9be48890b095dd871b43c01624a3fd5333527671348f2f6621bf"
Dec 05 12:26:27 crc kubenswrapper[4784]: E1205 12:26:27.999545 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-fxbpl_openshift-ovn-kubernetes(291f2a35-7dd5-4af9-87f0-caae4ef75c66)\"" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66"
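The "back-off 20s" in the last entry is the kubelet's crash-loop back-off for ovnkube-controller: by default the delay starts at 10s and doubles on each failed restart, capped at 5m. A short sketch of that schedule (illustrative of the defaults, not the kubelet's actual implementation):

package main

import (
	"fmt"
	"time"
)

// crashLoopDelay doubles the restart delay per consecutive failure,
// capping at max. With base=10s, restart 1 yields the 20s seen above.
func crashLoopDelay(restarts int, base, max time.Duration) time.Duration {
	d := base
	for i := 0; i < restarts; i++ {
		d *= 2
		if d >= max {
			return max
		}
	}
	return d
}

func main() {
	for r := 0; r <= 5; r++ {
		fmt.Printf("restart %d -> back-off %s\n", r, crashLoopDelay(r, 10*time.Second, 5*time.Minute))
	}
	// restart 0 -> 10s, restart 1 -> 20s (matches the log), ... capped at 5m0s
}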
[the node-status cycle repeats at 12:26:28.091, .193, .302, .405, .507, .610, .712, .815 and .917]
Dec 05 12:26:28 crc kubenswrapper[4784]: I1205 12:26:28.998298 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 12:26:28 crc kubenswrapper[4784]: I1205 12:26:28.998367 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct"
Dec 05 12:26:28 crc kubenswrapper[4784]: I1205 12:26:28.998319 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 12:26:28 crc kubenswrapper[4784]: E1205 12:26:28.998451 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 12:26:28 crc kubenswrapper[4784]: E1205 12:26:28.998574 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543"
Dec 05 12:26:28 crc kubenswrapper[4784]: E1205 12:26:28.998646 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
[the node-status cycle repeats at 12:26:29.019, .121, .224, .326, .429, .531, .634, .736, .840 and .943]
Dec 05 12:26:29 crc kubenswrapper[4784]: I1205 12:26:29.998311 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 12:26:29 crc kubenswrapper[4784]: E1205 12:26:29.998474 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
[the node-status cycle repeats at 12:26:30.046, .149, .252, .355, .458 and .560]
Dec 05 12:26:30 crc kubenswrapper[4784]: I1205 12:26:30.617212 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/531c2cfd-8b93-4ec4-88ab-fb4e40de2543-metrics-certs\") pod \"network-metrics-daemon-ln9ct\" (UID: \"531c2cfd-8b93-4ec4-88ab-fb4e40de2543\") " pod="openshift-multus/network-metrics-daemon-ln9ct"
Dec 05 12:26:30 crc kubenswrapper[4784]: E1205 12:26:30.617353 4784 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 05 12:26:30 crc kubenswrapper[4784]: E1205 12:26:30.617424 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/531c2cfd-8b93-4ec4-88ab-fb4e40de2543-metrics-certs podName:531c2cfd-8b93-4ec4-88ab-fb4e40de2543 nodeName:}" failed. No retries permitted until 2025-12-05 12:27:02.617405229 +0000 UTC m=+102.037472044 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/531c2cfd-8b93-4ec4-88ab-fb4e40de2543-metrics-certs") pod "network-metrics-daemon-ln9ct" (UID: "531c2cfd-8b93-4ec4-88ab-fb4e40de2543") : object "openshift-multus"/"metrics-daemon-secret" not registered
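The nestedpendingoperations entry above schedules the next MountVolume attempt for 12:27:02, i.e. 32s later, via exponential back-off. A hedged sketch of that scheduling; the 1s base and 2m cap are assumptions for illustration, not the kubelet's exact constants (1s doubled five times gives the 32s seen here):

package main

import (
	"fmt"
	"time"
)

// retryState tracks consecutive failures of one volume operation.
type retryState struct {
	failures int
	lastErr  time.Time
}

// nextRetryAfter doubles the wait per consecutive failure, starting at 1s
// and capping at 2m (assumed values; 1s << 5 = 32s matches the log).
func (r *retryState) nextRetryAfter() time.Duration {
	d := time.Second << r.failures
	if max := 2 * time.Minute; d > max {
		d = max
	}
	return d
}

func main() {
	r := retryState{failures: 5, lastErr: time.Date(2025, 12, 5, 12, 26, 30, 0, time.UTC)}
	wait := r.nextRetryAfter()
	fmt.Printf("No retries permitted until %s (durationBeforeRetry %s)\n",
		r.lastErr.Add(wait).Format(time.RFC3339), wait)
}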
[the node-status cycle repeats at 12:26:30.662, .765, .868 and .971]
Dec 05 12:26:30 crc kubenswrapper[4784]: I1205 12:26:30.998049 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 12:26:30 crc kubenswrapper[4784]: E1205 12:26:30.998165 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 12:26:30 crc kubenswrapper[4784]: I1205 12:26:30.998365 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:26:30 crc kubenswrapper[4784]: E1205 12:26:30.998423 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543" Dec 05 12:26:30 crc kubenswrapper[4784]: I1205 12:26:30.998765 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:26:30 crc kubenswrapper[4784]: E1205 12:26:30.998849 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.012522 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4ccc799-05c4-447c-b61d-e475213628f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d17f8c60696435050e9a5da93f54ee6a49b87142b36eb925b68197d10b3a935d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://906b2cfdbacb9adc9b4cac9f3f1845c45fbaf3a0871e50cf3725dc9bebd85631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242
b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://906b2cfdbacb9adc9b4cac9f3f1845c45fbaf3a0871e50cf3725dc9bebd85631\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:31Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.028104 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b4aeb18e84f1b440528e547960f9aed0bf44e53a870af6b06bc85f754e1c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40
792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}
,{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"20
25-12-05T12:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vpljs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:31Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.038789 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ln9ct" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"531c2cfd-8b93-4ec4-88ab-fb4e40de2543\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmtkj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmtkj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:58Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ln9ct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:31Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.052033 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba1e93b1bb1f04a0c36b81190dc5ba99e09cff796f519b55340c78d2d6305a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:31Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.065434 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:31Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.073326 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.073373 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.073384 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.073401 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.073412 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:31Z","lastTransitionTime":"2025-12-05T12:26:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.078171 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g5gv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759cb09f-42c3-4254-82f8-b5285b61012a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c44657f4f0a70fc2d2f1dcc08f1ca23c5fe94e77d3dbdd1bbb3f36f6471a62d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rpk8f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g5gv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:31Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.094881 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"291f2a35-7dd5-4af9-87f0-caae4ef75c66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mou
ntPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\
\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff23bc7ad1c9be48890b095dd871b43c01624a3fd5333527671348f2f6621bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dff23bc7ad1c9be48890b095dd871b43c01624a3fd5333527671348f2f6621bf\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:26:14Z\\\",\\\"message\\\":\\\"g-cert-secret-name:serving-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc00774da5f \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: service-ca-operator,},ClusterIP:10.217.4.40,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.40],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1205 12:26:13.843935 6427 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1205 12:26:13.843976 6427 lb_config.go:1031] Cluster endpoints for openshift-service-ca-operator/metrics for network=default are: map[]\\\\nF1205 12:26:13.844007 6427 ovnkube.go:137] failed to run 
ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:26:13Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-fxbpl_openshift-ovn-kubernetes(291f2a35-7dd5-4af9-87f0-caae4ef75c66)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernet
es.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fxbpl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:31Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.105907 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dzwxp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f8364fb-1be8-4baa-aff0-10d4a4e8d614\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f0d98ad3b8357dc9296635ef627f67300f4bbdd8131491552ba43830d1e658\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2xfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dzwxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:31Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.119661 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 12:25:38.416897 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 12:25:38.417058 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:25:38.422141 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1079216912/tls.crt::/tmp/serving-cert-1079216912/tls.key\\\\\\\"\\\\nI1205 12:25:38.771082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:25:38.774729 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:25:38.774756 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:25:38.774788 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:25:38.774811 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:25:38.780280 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:25:38.780316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780321 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:25:38.780329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:25:38.780332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:25:38.780335 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:25:38.780478 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:25:38.783020 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:31Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.130739 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a95e925e1cf25326e556267b9b43fcff4b12eafe8aeb03b577a01ed8f0d98ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:31Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.142696 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xpw77" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"beba5d67-ad2e-4968-91da-3f451dd2cdc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52618f95fb8efa7ff3baa650dcf8f3373b7d17bc27138fa03e49c68792adf36c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjrpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xpw77\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:31Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.155066 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be412f31-7a36-4811-8914-be8cdc987d08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76ddca8a6cc5af5e792bf0f50c2bc3cfcec5ea40705126799786373ad5d18e8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45f31312230c7d56492145f94ee5dfda841a79b7d35cf4139b4fc09eacf070fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sx8lm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:31Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.168556 4784 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0851219b-f3dd-4229-b5c5-b4d86a452bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f040ccf64d679f2035148f8391ebcf3fb0e262970a60355616a2d51c5114ddf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7t7rf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2386591cf338cbe93dd94038583c47b3f3e56762d52bf6e99de5eb8e08da02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7t7rf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pzsqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:31Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.177666 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.177705 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.177716 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.177732 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.177741 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:31Z","lastTransitionTime":"2025-12-05T12:26:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.180993 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220
d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d237806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:31Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.195998 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f51b46aa-f46b-4de1-bbbc-b23acb571394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3edce4b8b954f99066449c8dd5566695f300fc65c85ba3c90827ea380c87cf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6534b0ccbe3164a1bab663c746b201b5aef84ca859991c89cc3a7a84693f24a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a248634eb57b9ae01ffea846c57482ea060fcbd6281c5404bf96ab0706ee3f2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd6a4685a8454beb00ca78c095040c255785f695c1645ce1142a835f8d01fac9\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd6a4685a8454beb00ca78c095040c255785f695c1645ce1142a835f8d01fac9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:31Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.210938 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:31Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.224379 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf86c4db02b1091f887fc7dc73c7e2e7bb8d03e7212fb8bd443a43c4f51e976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87cb87114139d85aeacf41902c2c61f7ccb3cef1f22892820a521390c2a64d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:31Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.239320 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:31Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.280444 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.280495 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.280506 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.280525 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.280534 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:31Z","lastTransitionTime":"2025-12-05T12:26:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.383895 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.383968 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.383984 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.384013 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.384031 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:31Z","lastTransitionTime":"2025-12-05T12:26:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.473654 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-g5gv5_759cb09f-42c3-4254-82f8-b5285b61012a/kube-multus/0.log" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.473696 4784 generic.go:334] "Generic (PLEG): container finished" podID="759cb09f-42c3-4254-82f8-b5285b61012a" containerID="8c44657f4f0a70fc2d2f1dcc08f1ca23c5fe94e77d3dbdd1bbb3f36f6471a62d" exitCode=1 Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.473725 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-g5gv5" event={"ID":"759cb09f-42c3-4254-82f8-b5285b61012a","Type":"ContainerDied","Data":"8c44657f4f0a70fc2d2f1dcc08f1ca23c5fe94e77d3dbdd1bbb3f36f6471a62d"} Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.474065 4784 scope.go:117] "RemoveContainer" containerID="8c44657f4f0a70fc2d2f1dcc08f1ca23c5fe94e77d3dbdd1bbb3f36f6471a62d" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.486435 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4ccc799-05c4-447c-b61d-e475213628f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d17f8c60696435050e9a5da93f54ee6a49b87142b36eb925b68197d10b3a935d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://906b2cfdbacb9adc9b4cac9f3f1845c45fbaf3a0871e50cf3725dc9bebd85631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9
06b2cfdbacb9adc9b4cac9f3f1845c45fbaf3a0871e50cf3725dc9bebd85631\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:31Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.486855 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.486906 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.486920 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.486961 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.486976 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:31Z","lastTransitionTime":"2025-12-05T12:26:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.502590 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b4aeb18e84f1b440528e547960f9aed0bf44e53a870af6b06bc85f754e1c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vpljs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:31Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.514339 4784 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/network-metrics-daemon-ln9ct" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"531c2cfd-8b93-4ec4-88ab-fb4e40de2543\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmtkj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmtkj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:58Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ln9ct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:31Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.528493 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba1e93b1bb1f04a0c36b81190dc5ba99e09cff796f519b55340c78d2d6305a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:31Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.543887 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:31Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.561378 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g5gv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759cb09f-42c3-4254-82f8-b5285b61012a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c44657f4f0a70fc2d2f1dcc08f1ca23c5fe94e77d3dbdd1bbb3f36f6471a62d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c44657f4f0a70fc2d2f1dcc08f1ca23c5fe94e77d3dbdd1bbb3f36f6471a62d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:26:30Z\\\",\\\"message\\\":\\\"2025-12-05T12:25:45+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_f67a9645-3a8a-41e7-b088-6802b0384b84\\\\n2025-12-05T12:25:45+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_f67a9645-3a8a-41e7-b088-6802b0384b84 to /host/opt/cni/bin/\\\\n2025-12-05T12:25:45Z [verbose] multus-daemon started\\\\n2025-12-05T12:25:45Z [verbose] Readiness Indicator file check\\\\n2025-12-05T12:26:30Z [error] have you checked that 
your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rpk8f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g5gv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:31Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.580631 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"291f2a35-7dd5-4af9-87f0-caae4ef75c66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff23bc7ad1c9be48890b095dd871b43c01624a3fd5333527671348f2f6621bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dff23bc7ad1c9be48890b095dd871b43c01624a3fd5333527671348f2f6621bf\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:26:14Z\\\",\\\"message\\\":\\\"g-cert-secret-name:serving-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc00774da5f \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: service-ca-operator,},ClusterIP:10.217.4.40,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.40],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1205 12:26:13.843935 6427 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1205 12:26:13.843976 6427 lb_config.go:1031] Cluster endpoints for openshift-service-ca-operator/metrics for network=default are: map[]\\\\nF1205 12:26:13.844007 6427 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:26:13Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-fxbpl_openshift-ovn-kubernetes(291f2a35-7dd5-4af9-87f0-caae4ef75c66)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fxbpl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:31Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.589116 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.589249 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.589320 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.589391 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.589460 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:31Z","lastTransitionTime":"2025-12-05T12:26:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.593163 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dzwxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f8364fb-1be8-4baa-aff0-10d4a4e8d614\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f0d98ad3b8357dc9296635ef627f67300f4bbdd8131491552ba43830d1e658\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2xfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dzwxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:31Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.605722 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 12:25:38.416897 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 12:25:38.417058 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:25:38.422141 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1079216912/tls.crt::/tmp/serving-cert-1079216912/tls.key\\\\\\\"\\\\nI1205 12:25:38.771082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:25:38.774729 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:25:38.774756 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:25:38.774788 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:25:38.774811 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:25:38.780280 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:25:38.780316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780321 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:25:38.780329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:25:38.780332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:25:38.780335 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:25:38.780478 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:25:38.783020 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:31Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.616781 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a95e925e1cf25326e556267b9b43fcff4b12eafe8aeb03b577a01ed8f0d98ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:31Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.627119 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xpw77" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"beba5d67-ad2e-4968-91da-3f451dd2cdc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52618f95fb8efa7ff3baa650dcf8f3373b7d17bc27138fa03e49c68792adf36c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjrpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xpw77\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:31Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.638599 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be412f31-7a36-4811-8914-be8cdc987d08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76ddca8a6cc5af5e792bf0f50c2bc3cfcec5ea40705126799786373ad5d18e8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45f31312230c7d56492145f94ee5dfda841a79b7d35cf4139b4fc09eacf070fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sx8lm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:31Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.648390 4784 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0851219b-f3dd-4229-b5c5-b4d86a452bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f040ccf64d679f2035148f8391ebcf3fb0e262970a60355616a2d51c5114ddf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7t7rf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2386591cf338cbe93dd94038583c47b3f3e56762d52bf6e99de5eb8e08da02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7t7rf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pzsqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:31Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.658774 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubern
etes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d237806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:31Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.669319 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f51b46aa-f46b-4de1-bbbc-b23acb571394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3edce4b8b954f99066449c8dd5566695f300fc65c85ba3c90827ea380c87cf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6534b0ccbe3164a1bab663c746b201b5aef84ca859991c89cc3a7a84693f24a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a248634eb57b9ae01ffea846c57482ea060fcbd6281c5404bf96ab0706ee3f2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd6a4685a8454beb00ca78c095040c255785f695c1645ce1142a835f8d01fac9\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd6a4685a8454beb00ca78c095040c255785f695c1645ce1142a835f8d01fac9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:31Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.680018 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:31Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.690590 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf86c4db02b1091f887fc7dc73c7e2e7bb8d03e7212fb8bd443a43c4f51e976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87cb87114139d85aeacf41902c2c61f7ccb3cef1f22892820a521390c2a64d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:31Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.691976 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.692022 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.692032 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.692047 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.692060 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:31Z","lastTransitionTime":"2025-12-05T12:26:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.704171 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:31Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.794025 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.794075 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.794088 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.794109 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.794122 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:31Z","lastTransitionTime":"2025-12-05T12:26:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.896463 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.896494 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.896503 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.896516 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.896526 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:31Z","lastTransitionTime":"2025-12-05T12:26:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:31 crc kubenswrapper[4784]: I1205 12:26:31.997826 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:26:31 crc kubenswrapper[4784]: E1205 12:26:31.997956 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.000054 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.000082 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.000093 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.000106 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.000117 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:32Z","lastTransitionTime":"2025-12-05T12:26:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.103335 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.103414 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.103426 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.103443 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.103456 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:32Z","lastTransitionTime":"2025-12-05T12:26:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.207495 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.207545 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.207557 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.207614 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.207629 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:32Z","lastTransitionTime":"2025-12-05T12:26:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.309556 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.309593 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.309601 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.309614 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.309622 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:32Z","lastTransitionTime":"2025-12-05T12:26:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.411118 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.411169 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.411180 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.411218 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.411232 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:32Z","lastTransitionTime":"2025-12-05T12:26:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.479523 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-g5gv5_759cb09f-42c3-4254-82f8-b5285b61012a/kube-multus/0.log" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.479858 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-g5gv5" event={"ID":"759cb09f-42c3-4254-82f8-b5285b61012a","Type":"ContainerStarted","Data":"58740f457dc5aa8e984e28676df457a65c76e6be7ac9f64348a9d7a25246f8bd"} Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.494981 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f51b46aa-f46b-4de1-bbbc-b23acb571394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3edce4b8b954f99066449c8dd5566695f300fc65c85ba3c90827ea380c87cf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6534b0ccbe3164a1bab663c746b201b5aef84ca859991c89cc3a7a84693f24a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a248634eb57b9ae01ffea846c57482ea060fcbd6281c5404bf96ab0706ee3f2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd6a4685a8454beb00ca78c095040c255785f695c1645ce1142a835f8d01fac9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd6a4685a8454beb00ca78c095040c255785f695c1645ce1142a835f8d01fac9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:32Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.508872 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:32Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.513160 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.513223 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.513236 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.513256 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.513270 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:32Z","lastTransitionTime":"2025-12-05T12:26:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.523933 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf86c4db02b1091f887fc7dc73c7e2e7bb8d03e7212fb8bd443a43c4f51e976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87cb87114139d85aeacf41902c2c61f7ccb3cef1f22892820a521390c2a64d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:32Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.537649 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:32Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.550405 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0851219b-f3dd-4229-b5c5-b4d86a452bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f040ccf64d679f2035148f8391ebcf3fb0e262970a60355616a2d51c5114ddf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7t7rf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2386591cf338cbe93dd94038583c47b3f3e56762d52bf6e99de5eb8e08da02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7t7rf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pzsqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:32Z is after 2025-08-24T17:21:41Z" Dec 05 
12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.569664 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d237806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:32Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.585233 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b4aeb18e84f1b440528e547960f9aed0bf44e53a870af6b06bc85f754e1c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vpljs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:32Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.595147 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4ccc799-05c4-447c-b61d-e475213628f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d17f8c60696435050e9a5da93f54ee6a49b87142b36eb925b68197d10b3a935d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://906b2cfdbacb9adc9b4cac9f3f1845c45fbaf3a0871e50cf3725dc9bebd85631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://906b2cfdbacb9adc9b4cac9f3f1845c45fbaf3a0871e50cf3725dc9bebd85631\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:32Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.606909 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:32Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.615561 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.615627 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.615638 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.615654 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.615664 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:32Z","lastTransitionTime":"2025-12-05T12:26:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.619500 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g5gv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759cb09f-42c3-4254-82f8-b5285b61012a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58740f457dc5aa8e984e28676df457a65c76e6be7ac9f64348a9d7a25246f8bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c44657f4f0a70fc2d2f1dcc08f1ca23c5fe94e77d3dbdd1bbb3f36f6471a62d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:26:30Z\\\",\\\"message\\\":\\\"2025-12-05T12:25:45+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_f67a9645-3a8a-41e7-b088-6802b0384b84\\\\n2025-12-05T12:25:45+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_f67a9645-3a8a-41e7-b088-6802b0384b84 to /host/opt/cni/bin/\\\\n2025-12-05T12:25:45Z [verbose] multus-daemon started\\\\n2025-12-05T12:25:45Z [verbose] Readiness Indicator file check\\\\n2025-12-05T12:26:30Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:26:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rpk8f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g5gv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:32Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.640182 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"291f2a35-7dd5-4af9-87f0-caae4ef75c66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff23bc7ad1c9be48890b095dd871b43c01624a3fd5333527671348f2f6621bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dff23bc7ad1c9be48890b095dd871b43c01624a3fd5333527671348f2f6621bf\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:26:14Z\\\",\\\"message\\\":\\\"g-cert-secret-name:serving-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc00774da5f \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: service-ca-operator,},ClusterIP:10.217.4.40,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.40],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1205 12:26:13.843935 6427 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1205 12:26:13.843976 6427 lb_config.go:1031] Cluster endpoints for openshift-service-ca-operator/metrics for network=default are: map[]\\\\nF1205 12:26:13.844007 6427 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:26:13Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-fxbpl_openshift-ovn-kubernetes(291f2a35-7dd5-4af9-87f0-caae4ef75c66)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fxbpl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:32Z is after 2025-08-24T17:21:41Z"
Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.652872 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dzwxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f8364fb-1be8-4baa-aff0-10d4a4e8d614\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f0d98ad3b8357dc9296635ef627f67300f4bbdd8131491552ba43830d1e658\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2xfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dzwxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:32Z is after 2025-08-24T17:21:41Z"
Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.663456 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ln9ct" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"531c2cfd-8b93-4ec4-88ab-fb4e40de2543\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmtkj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmtkj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:58Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ln9ct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:32Z is after 2025-08-24T17:21:41Z"
Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.677714 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba1e93b1bb1f04a0c36b81190dc5ba99e09cff796f519b55340c78d2d6305a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:32Z is after 2025-08-24T17:21:41Z"
Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.690689 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 12:25:38.416897 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 12:25:38.417058 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:25:38.422141 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1079216912/tls.crt::/tmp/serving-cert-1079216912/tls.key\\\\\\\"\\\\nI1205 12:25:38.771082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:25:38.774729 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:25:38.774756 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:25:38.774788 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:25:38.774811 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:25:38.780280 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:25:38.780316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780321 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:25:38.780329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:25:38.780332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:25:38.780335 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:25:38.780478 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:25:38.783020 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:32Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.700937 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a95e925e1cf25326e556267b9b43fcff4b12eafe8aeb03b577a01ed8f0d98ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:32Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.709692 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xpw77" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"beba5d67-ad2e-4968-91da-3f451dd2cdc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52618f95fb8efa7ff3baa650dcf8f3373b7d17bc27138fa03e49c68792adf36c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjrpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xpw77\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:32Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.718165 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.718220 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.718232 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.718249 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.718261 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:32Z","lastTransitionTime":"2025-12-05T12:26:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.719754 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be412f31-7a36-4811-8914-be8cdc987d08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76ddca8a6cc5af5e792bf0f50c2bc3cfcec5ea40705126799786373ad5d18e8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45f31312230c7d56492145f94ee5dfda841a79b7d35cf4139b4fc09eacf070fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sx8lm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:32Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.820785 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.821206 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.821293 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.821397 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.821494 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:32Z","lastTransitionTime":"2025-12-05T12:26:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.924237 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.924280 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.924290 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.924309 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.924321 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:32Z","lastTransitionTime":"2025-12-05T12:26:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.998352 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.998456 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:26:32 crc kubenswrapper[4784]: E1205 12:26:32.998503 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:26:32 crc kubenswrapper[4784]: E1205 12:26:32.998579 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543" Dec 05 12:26:32 crc kubenswrapper[4784]: I1205 12:26:32.998393 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:26:32 crc kubenswrapper[4784]: E1205 12:26:32.999220 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.026782 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.026843 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.026853 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.026868 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.026878 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:33Z","lastTransitionTime":"2025-12-05T12:26:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.129060 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.129096 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.129104 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.129119 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.129127 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:33Z","lastTransitionTime":"2025-12-05T12:26:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.231611 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.231664 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.231676 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.231694 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.231706 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:33Z","lastTransitionTime":"2025-12-05T12:26:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.333602 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.333643 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.333654 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.333669 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.333678 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:33Z","lastTransitionTime":"2025-12-05T12:26:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.339908 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.339969 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.339987 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.340011 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.340031 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:33Z","lastTransitionTime":"2025-12-05T12:26:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:33 crc kubenswrapper[4784]: E1205 12:26:33.354059 4784 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"08035136-431a-41d0-879c-bf86d5af7e54\\\",\\\"systemUUID\\\":\\\"aac4a951-c40f-4b5f-a660-6c137757957c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:33Z is after 2025-08-24T17:21:41Z"
Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.357935 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.357977 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.357986 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.358001 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.358013 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:33Z","lastTransitionTime":"2025-12-05T12:26:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:26:33 crc kubenswrapper[4784]: E1205 12:26:33.373821 4784 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"08035136-431a-41d0-879c-bf86d5af7e54\\\",\\\"systemUUID\\\":\\\"aac4a951-c40f-4b5f-a660-6c137757957c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:33Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.378657 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.378716 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.378728 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.378749 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.378764 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:33Z","lastTransitionTime":"2025-12-05T12:26:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:33 crc kubenswrapper[4784]: E1205 12:26:33.391044 4784 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"08035136-431a-41d0-879c-bf86d5af7e54\\\",\\\"systemUUID\\\":\\\"aac4a951-c40f-4b5f-a660-6c137757957c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:33Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.394964 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.395006 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.395018 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.395034 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.395046 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:33Z","lastTransitionTime":"2025-12-05T12:26:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:33 crc kubenswrapper[4784]: E1205 12:26:33.407887 4784 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"08035136-431a-41d0-879c-bf86d5af7e54\\\",\\\"systemUUID\\\":\\\"aac4a951-c40f-4b5f-a660-6c137757957c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:33Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.412122 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.412171 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
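Every patch attempt above fails for the same reason: the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 presents a serving certificate whose NotAfter (2025-08-24T17:21:41Z) is well before the node clock (2025-12-05T12:26:33Z), so TLS verification rejects it. A minimal sketch of the validity-window check that the handshake enforces, assuming a hypothetical PEM path for illustration (the webhook's real serving certificate lives wherever its TLS secret is mounted):

```go
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	// Hypothetical path for illustration only.
	data, err := os.ReadFile("/tmp/webhook-serving.crt")
	if err != nil {
		panic(err)
	}
	block, _ := pem.Decode(data) // first PEM block: the leaf certificate
	if block == nil {
		panic("no PEM data found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		panic(err)
	}
	now := time.Now().UTC()
	// The same window check that produces "x509: certificate has expired
	// or is not yet valid" during the TLS handshake.
	if now.Before(cert.NotBefore) || now.After(cert.NotAfter) {
		fmt.Printf("certificate invalid at %s (valid %s to %s)\n",
			now.Format(time.RFC3339),
			cert.NotBefore.Format(time.RFC3339),
			cert.NotAfter.Format(time.RFC3339))
	}
}
```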
event="NodeHasNoDiskPressure" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.412206 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.412226 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.412237 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:33Z","lastTransitionTime":"2025-12-05T12:26:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:33 crc kubenswrapper[4784]: E1205 12:26:33.424274 4784 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"08035136-431a-41d0-879c-bf86d5af7e54\\\",\\\"systemUUID\\\":\\\"aac4a951-c40f-4b5f-a660-6c137757957c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:33Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:33 crc kubenswrapper[4784]: E1205 12:26:33.424396 4784 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.436294 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
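The "update node status exceeds retry count" line marks the kubelet's status sync giving up: tryUpdateNodeStatus retries a fixed number of times (nodeStatusUpdateRetry, 5 in the upstream kubelet sources) before abandoning the cycle, which is why a short burst of identical webhook failures ends in this terminal error. A sketch of the pattern, assuming hypothetical helper names, not the kubelet's actual code:

```go
package main

import (
	"errors"
	"fmt"
)

// nodeStatusUpdateRetry mirrors the upstream kubelet constant: the status
// sync gives up after this many consecutive patch failures.
const nodeStatusUpdateRetry = 5

// patchNodeStatus stands in for the PATCH against the API server; here it
// always fails the way the log does (webhook TLS verification error).
func patchNodeStatus() error {
	return errors.New(`failed calling webhook "node.network-node-identity.openshift.io": tls: failed to verify certificate`)
}

func updateNodeStatus() error {
	for i := 0; i < nodeStatusUpdateRetry; i++ {
		if err := patchNodeStatus(); err != nil {
			fmt.Println("Error updating node status, will retry:", err)
			continue
		}
		return nil
	}
	return fmt.Errorf("update node status exceeds retry count")
}

func main() {
	if err := updateNodeStatus(); err != nil {
		fmt.Println("Unable to update node status:", err)
	}
}
```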
event="NodeHasSufficientMemory" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.436361 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.436374 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.436392 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.436403 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:33Z","lastTransitionTime":"2025-12-05T12:26:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.538935 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.538991 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.539002 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.539017 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.539025 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:33Z","lastTransitionTime":"2025-12-05T12:26:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.640989 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.641031 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.641039 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.641054 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.641066 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:33Z","lastTransitionTime":"2025-12-05T12:26:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.743266 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.743319 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.743331 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.743348 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.743360 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:33Z","lastTransitionTime":"2025-12-05T12:26:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.845935 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.846011 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.846027 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.846056 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.846070 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:33Z","lastTransitionTime":"2025-12-05T12:26:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.948151 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.948212 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.948224 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.948241 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.948253 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:33Z","lastTransitionTime":"2025-12-05T12:26:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:33 crc kubenswrapper[4784]: I1205 12:26:33.998207 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:26:33 crc kubenswrapper[4784]: E1205 12:26:33.998359 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.051292 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.051376 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.051418 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.051438 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.051449 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:34Z","lastTransitionTime":"2025-12-05T12:26:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.153985 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.154020 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.154028 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.154043 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.154051 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:34Z","lastTransitionTime":"2025-12-05T12:26:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
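Every "Node became not ready" heartbeat above carries the same root condition: the container runtime reports NetworkReady=false because /etc/kubernetes/cni/net.d/ contains no CNI network configuration, and the kubelet holds the node NotReady until one appears. A quick probe of that directory, a sketch of the kind of check the runtime's CNI loader performs (looking for a *.conf, *.conflist, or *.json file), under the assumption that the directory path from the log is the one in use:

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"
)

func main() {
	// The directory named in the kubelet error message.
	const cniDir = "/etc/kubernetes/cni/net.d"
	entries, err := os.ReadDir(cniDir)
	if err != nil {
		fmt.Println("cannot read CNI config dir:", err)
		return
	}
	var confs []string
	for _, e := range entries {
		switch strings.ToLower(filepath.Ext(e.Name())) {
		case ".conf", ".conflist", ".json":
			confs = append(confs, e.Name())
		}
	}
	if len(confs) == 0 {
		// The state the log shows: NetworkReady stays false until the
		// network plugin writes its configuration here.
		fmt.Println("no CNI configuration file found; network plugin not ready")
		return
	}
	fmt.Println("CNI configs present:", confs)
}
```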
Has your network provider started?"} Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.258007 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.258042 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.258052 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.258066 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.258077 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:34Z","lastTransitionTime":"2025-12-05T12:26:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.361705 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.361763 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.361772 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.361789 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.361801 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:34Z","lastTransitionTime":"2025-12-05T12:26:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.464722 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.464771 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.464780 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.464793 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.464802 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:34Z","lastTransitionTime":"2025-12-05T12:26:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.566960 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.567005 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.567016 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.567034 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.567046 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:34Z","lastTransitionTime":"2025-12-05T12:26:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.669129 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.669173 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.669181 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.669214 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.669225 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:34Z","lastTransitionTime":"2025-12-05T12:26:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.771599 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.771679 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.771689 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.771713 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.771732 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:34Z","lastTransitionTime":"2025-12-05T12:26:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.873842 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.873894 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.873905 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.873920 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.873930 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:34Z","lastTransitionTime":"2025-12-05T12:26:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.975747 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.975782 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.975793 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.975807 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.975815 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:34Z","lastTransitionTime":"2025-12-05T12:26:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.998321 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.998374 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:26:34 crc kubenswrapper[4784]: I1205 12:26:34.998352 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:26:34 crc kubenswrapper[4784]: E1205 12:26:34.998460 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:26:34 crc kubenswrapper[4784]: E1205 12:26:34.998581 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:26:34 crc kubenswrapper[4784]: E1205 12:26:34.998638 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543" Dec 05 12:26:35 crc kubenswrapper[4784]: I1205 12:26:35.077927 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:35 crc kubenswrapper[4784]: I1205 12:26:35.077967 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:35 crc kubenswrapper[4784]: I1205 12:26:35.077979 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:35 crc kubenswrapper[4784]: I1205 12:26:35.077994 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:35 crc kubenswrapper[4784]: I1205 12:26:35.078005 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:35Z","lastTransitionTime":"2025-12-05T12:26:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:35 crc kubenswrapper[4784]: I1205 12:26:35.181688 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:35 crc kubenswrapper[4784]: I1205 12:26:35.181741 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:35 crc kubenswrapper[4784]: I1205 12:26:35.181753 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:35 crc kubenswrapper[4784]: I1205 12:26:35.181772 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:35 crc kubenswrapper[4784]: I1205 12:26:35.181782 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:35Z","lastTransitionTime":"2025-12-05T12:26:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:35 crc kubenswrapper[4784]: I1205 12:26:35.285589 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:35 crc kubenswrapper[4784]: I1205 12:26:35.285647 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:35 crc kubenswrapper[4784]: I1205 12:26:35.285659 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:35 crc kubenswrapper[4784]: I1205 12:26:35.285680 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:35 crc kubenswrapper[4784]: I1205 12:26:35.285692 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:35Z","lastTransitionTime":"2025-12-05T12:26:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:35 crc kubenswrapper[4784]: I1205 12:26:35.389361 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:35 crc kubenswrapper[4784]: I1205 12:26:35.389399 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:35 crc kubenswrapper[4784]: I1205 12:26:35.389407 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:35 crc kubenswrapper[4784]: I1205 12:26:35.389424 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:35 crc kubenswrapper[4784]: I1205 12:26:35.389437 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:35Z","lastTransitionTime":"2025-12-05T12:26:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:35 crc kubenswrapper[4784]: I1205 12:26:35.491407 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:35 crc kubenswrapper[4784]: I1205 12:26:35.491454 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:35 crc kubenswrapper[4784]: I1205 12:26:35.491464 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:35 crc kubenswrapper[4784]: I1205 12:26:35.491481 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:35 crc kubenswrapper[4784]: I1205 12:26:35.491493 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:35Z","lastTransitionTime":"2025-12-05T12:26:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:35 crc kubenswrapper[4784]: I1205 12:26:35.594533 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:35 crc kubenswrapper[4784]: I1205 12:26:35.594594 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:35 crc kubenswrapper[4784]: I1205 12:26:35.594606 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:35 crc kubenswrapper[4784]: I1205 12:26:35.594626 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:35 crc kubenswrapper[4784]: I1205 12:26:35.594640 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:35Z","lastTransitionTime":"2025-12-05T12:26:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:35 crc kubenswrapper[4784]: I1205 12:26:35.697156 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:35 crc kubenswrapper[4784]: I1205 12:26:35.697248 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:35 crc kubenswrapper[4784]: I1205 12:26:35.697266 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:35 crc kubenswrapper[4784]: I1205 12:26:35.697284 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:35 crc kubenswrapper[4784]: I1205 12:26:35.697297 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:35Z","lastTransitionTime":"2025-12-05T12:26:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:35 crc kubenswrapper[4784]: I1205 12:26:35.800385 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:35 crc kubenswrapper[4784]: I1205 12:26:35.800439 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:35 crc kubenswrapper[4784]: I1205 12:26:35.800450 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:35 crc kubenswrapper[4784]: I1205 12:26:35.800465 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:35 crc kubenswrapper[4784]: I1205 12:26:35.800478 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:35Z","lastTransitionTime":"2025-12-05T12:26:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:35 crc kubenswrapper[4784]: I1205 12:26:35.902492 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:35 crc kubenswrapper[4784]: I1205 12:26:35.902538 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:35 crc kubenswrapper[4784]: I1205 12:26:35.902550 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:35 crc kubenswrapper[4784]: I1205 12:26:35.902569 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:35 crc kubenswrapper[4784]: I1205 12:26:35.902584 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:35Z","lastTransitionTime":"2025-12-05T12:26:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:35 crc kubenswrapper[4784]: I1205 12:26:35.998551 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:26:35 crc kubenswrapper[4784]: E1205 12:26:35.998712 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.005308 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.005369 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.005393 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.005416 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.005429 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:36Z","lastTransitionTime":"2025-12-05T12:26:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.107703 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.107736 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.107745 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.107758 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.107772 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:36Z","lastTransitionTime":"2025-12-05T12:26:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.210894 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.210948 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.210957 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.210974 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.210982 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:36Z","lastTransitionTime":"2025-12-05T12:26:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.314141 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.314218 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.314231 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.314249 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.314263 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:36Z","lastTransitionTime":"2025-12-05T12:26:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.417132 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.417206 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.417221 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.417241 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.417254 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:36Z","lastTransitionTime":"2025-12-05T12:26:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.519917 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.519975 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.519986 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.520003 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.520014 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:36Z","lastTransitionTime":"2025-12-05T12:26:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.622956 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.623014 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.623031 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.623052 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.623064 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:36Z","lastTransitionTime":"2025-12-05T12:26:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.725799 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.725844 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.725855 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.725869 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.725880 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:36Z","lastTransitionTime":"2025-12-05T12:26:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.827957 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.827998 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.828008 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.828024 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.828035 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:36Z","lastTransitionTime":"2025-12-05T12:26:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.930761 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.930804 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.930819 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.930839 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.930849 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:36Z","lastTransitionTime":"2025-12-05T12:26:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.997954 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:26:36 crc kubenswrapper[4784]: E1205 12:26:36.998070 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.998254 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:26:36 crc kubenswrapper[4784]: E1205 12:26:36.998307 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543" Dec 05 12:26:36 crc kubenswrapper[4784]: I1205 12:26:36.998485 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:26:36 crc kubenswrapper[4784]: E1205 12:26:36.998719 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.032779 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.032847 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.032864 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.032889 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.032924 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:37Z","lastTransitionTime":"2025-12-05T12:26:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.135467 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.135508 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.135518 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.135532 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.135542 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:37Z","lastTransitionTime":"2025-12-05T12:26:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.237912 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.237954 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.237965 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.237980 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.237991 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:37Z","lastTransitionTime":"2025-12-05T12:26:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.340921 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.340985 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.340999 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.341017 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.341028 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:37Z","lastTransitionTime":"2025-12-05T12:26:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.443630 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.443697 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.443712 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.443736 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.443752 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:37Z","lastTransitionTime":"2025-12-05T12:26:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.546779 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.546863 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.546881 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.546910 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.546937 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:37Z","lastTransitionTime":"2025-12-05T12:26:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.649797 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.649882 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.649895 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.649920 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.649933 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:37Z","lastTransitionTime":"2025-12-05T12:26:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.752952 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.753026 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.753039 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.753059 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.753073 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:37Z","lastTransitionTime":"2025-12-05T12:26:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.856160 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.856238 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.856250 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.856272 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.856285 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:37Z","lastTransitionTime":"2025-12-05T12:26:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.959818 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.959871 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.959883 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.959905 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.959917 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:37Z","lastTransitionTime":"2025-12-05T12:26:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:37 crc kubenswrapper[4784]: I1205 12:26:37.998856 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:26:37 crc kubenswrapper[4784]: E1205 12:26:37.999046 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:26:38 crc kubenswrapper[4784]: I1205 12:26:38.062256 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:38 crc kubenswrapper[4784]: I1205 12:26:38.062326 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:38 crc kubenswrapper[4784]: I1205 12:26:38.062339 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:38 crc kubenswrapper[4784]: I1205 12:26:38.062359 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:38 crc kubenswrapper[4784]: I1205 12:26:38.062373 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:38Z","lastTransitionTime":"2025-12-05T12:26:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:38 crc kubenswrapper[4784]: I1205 12:26:38.164255 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:38 crc kubenswrapper[4784]: I1205 12:26:38.164296 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:38 crc kubenswrapper[4784]: I1205 12:26:38.164309 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:38 crc kubenswrapper[4784]: I1205 12:26:38.164326 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:38 crc kubenswrapper[4784]: I1205 12:26:38.164336 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:38Z","lastTransitionTime":"2025-12-05T12:26:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 05 12:26:38 crc kubenswrapper[4784]: I1205 12:26:38.267985 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:26:38 crc kubenswrapper[4784]: I1205 12:26:38.268044 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:26:38 crc kubenswrapper[4784]: I1205 12:26:38.268063 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:26:38 crc kubenswrapper[4784]: I1205 12:26:38.268081 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:26:38 crc kubenswrapper[4784]: I1205 12:26:38.268094 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:38Z","lastTransitionTime":"2025-12-05T12:26:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:26:38 crc kubenswrapper[4784]: I1205 12:26:38.998033 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 12:26:38 crc kubenswrapper[4784]: I1205 12:26:38.998139 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct"
Dec 05 12:26:38 crc kubenswrapper[4784]: E1205 12:26:38.998177 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 12:26:38 crc kubenswrapper[4784]: I1205 12:26:38.998327 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 12:26:38 crc kubenswrapper[4784]: E1205 12:26:38.998688 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543"
Dec 05 12:26:38 crc kubenswrapper[4784]: E1205 12:26:38.998868 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 12:26:39 crc kubenswrapper[4784]: I1205 12:26:39.091042 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:26:39 crc kubenswrapper[4784]: I1205 12:26:39.091146 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:26:39 crc kubenswrapper[4784]: I1205 12:26:39.091165 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:26:39 crc kubenswrapper[4784]: I1205 12:26:39.091217 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:26:39 crc kubenswrapper[4784]: I1205 12:26:39.091241 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:39Z","lastTransitionTime":"2025-12-05T12:26:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:26:39 crc kubenswrapper[4784]: I1205 12:26:39.998622 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 12:26:39 crc kubenswrapper[4784]: E1205 12:26:39.998748 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 12:26:40 crc kubenswrapper[4784]: I1205 12:26:40.014991 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:26:40 crc kubenswrapper[4784]: I1205 12:26:40.015055 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:26:40 crc kubenswrapper[4784]: I1205 12:26:40.015068 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:26:40 crc kubenswrapper[4784]: I1205 12:26:40.015088 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:26:40 crc kubenswrapper[4784]: I1205 12:26:40.015100 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:40Z","lastTransitionTime":"2025-12-05T12:26:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
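
The repeated KubeletNotReady blocks above are the kubelet re-checking the runtime network at each node-status sync (roughly every 100 ms in this log); the condition persists until a CNI plugin, on this CRC cluster Multus/OVN-Kubernetes, writes a config file into the directory named in the message. A minimal way to confirm that from the node, assuming shell access to the CRC VM and a working oc login; the example file name in the comment is illustrative, not taken from this log:

# The kubelet polls this directory; it stays empty (hence the repeated
# "no CNI configuration file" condition) until the network operator's
# daemonsets start and drop a *.conf/*.conflist file there (for example
# a Multus conf file such as 00-multus.conf, name illustrative).
ls -l /etc/kubernetes/cni/net.d/

# Check whether the daemonsets that write that file are running yet.
oc get pods -n openshift-multus
oc get pods -n openshift-ovn-kubernetes
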
Dec 05 12:26:40 crc kubenswrapper[4784]: I1205 12:26:40.998066 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 12:26:40 crc kubenswrapper[4784]: I1205 12:26:40.998066 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 12:26:40 crc kubenswrapper[4784]: I1205 12:26:40.998093 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct"
Dec 05 12:26:40 crc kubenswrapper[4784]: E1205 12:26:40.998252 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 12:26:41 crc kubenswrapper[4784]: E1205 12:26:40.998501 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
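
From 12:26:41 onward the kubelet also fails to patch pod status: each PATCH is intercepted by the pod.network-node-identity.openshift.io webhook at https://127.0.0.1:9743, whose serving certificate expired on 2025-08-24T17:21:41Z, long before the node's current clock of 2025-12-05. A quick check of the certificate window from the node, assuming openssl is installed on the host (a diagnostic sketch, not part of this log):

# Fetch the webhook's serving certificate and print its validity dates;
# notAfter should match the 2025-08-24T17:21:41Z expiry quoted in the
# kubelet errors below.
echo | openssl s_client -connect 127.0.0.1:9743 2>/dev/null | openssl x509 -noout -dates
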
pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543" Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:40.998854 4784 scope.go:117] "RemoveContainer" containerID="dff23bc7ad1c9be48890b095dd871b43c01624a3fd5333527671348f2f6621bf" Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.020857 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\
\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 12:25:38.416897 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 12:25:38.417058 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:25:38.422141 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1079216912/tls.crt::/tmp/serving-cert-1079216912/tls.key\\\\\\\"\\\\nI1205 12:25:38.771082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:25:38.774729 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:25:38.774756 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:25:38.774788 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:25:38.774811 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:25:38.780280 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:25:38.780316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780321 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:25:38.780329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:25:38.780332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:25:38.780335 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:25:38.780478 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:25:38.783020 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.032799 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a95e925e1cf25326e556267b9b43fcff4b12eafe8aeb03b577a01ed8f0d98ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.041918 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.043484 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.043517 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.043533 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.043543 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:41Z","lastTransitionTime":"2025-12-05T12:26:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.043997 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xpw77" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"beba5d67-ad2e-4968-91da-3f451dd2cdc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52618f95fb8efa7ff3baa650dcf8f3373b7d17bc27138fa03e49c68792adf36c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjrpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xpw77\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.055991 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be412f31-7a36-4811-8914-be8cdc987d08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76ddca8a6cc5af5e792bf0f50c2bc3cfcec5ea40705126799786373ad5d18e8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45f31312230c7d56492145f94ee5dfda841a79b7d35cf4139b4fc09eacf070fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sx8lm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.066995 4784 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0851219b-f3dd-4229-b5c5-b4d86a452bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f040ccf64d679f2035148f8391ebcf3fb0e262970a60355616a2d51c5114ddf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7t7rf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2386591cf338cbe93dd94038583c47b3f3e56762d52bf6e99de5eb8e08da02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7t7rf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pzsqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.079260 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubern
etes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d237806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.090455 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f51b46aa-f46b-4de1-bbbc-b23acb571394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3edce4b8b954f99066449c8dd5566695f300fc65c85ba3c90827ea380c87cf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6534b0ccbe3164a1bab663c746b201b5aef84ca859991c89cc3a7a84693f24a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a248634eb57b9ae01ffea846c57482ea060fcbd6281c5404bf96ab0706ee3f2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd6a4685a8454beb00ca78c095040c255785f695c1645ce1142a835f8d01fac9\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd6a4685a8454beb00ca78c095040c255785f695c1645ce1142a835f8d01fac9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.100564 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.110562 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf86c4db02b1091f887fc7dc73c7e2e7bb8d03e7212fb8bd443a43c4f51e976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87cb87114139d85aeacf41902c2c61f7ccb3cef1f22892820a521390c2a64d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.124497 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.139040 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4ccc799-05c4-447c-b61d-e475213628f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d17f8c60696435050e9a5da93f54ee6a49b87142b36eb925b68197d10b3a935d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://906b2cfdbacb9adc9b4cac9f3f1845c45fbaf3a0871e50cf3725dc9bebd85631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://906b2cfdbacb9adc9b4cac9f3f1845c45fbaf3a0871e50cf3
725dc9bebd85631\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.145866 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.145886 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.145894 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.145907 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.145916 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:41Z","lastTransitionTime":"2025-12-05T12:26:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.157788 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b4aeb18e84f1b440528e547960f9aed0bf44e53a870af6b06bc85f754e1c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vpljs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.169971 4784 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/network-metrics-daemon-ln9ct" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"531c2cfd-8b93-4ec4-88ab-fb4e40de2543\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmtkj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmtkj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:58Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ln9ct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.182797 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba1e93b1bb1f04a0c36b81190dc5ba99e09cff796f519b55340c78d2d6305a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.193395 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.205100 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g5gv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759cb09f-42c3-4254-82f8-b5285b61012a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58740f457dc5aa8e984e28676df457a65c76e6be7ac9f64348a9d7a25246f8bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c44657f4f0a70fc2d2f1dcc08f1ca23c5fe94e77d3dbdd1bbb3f36f6471a62d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:26:30Z\\\",\\\"message\\\":\\\"2025-12-05T12:25:45+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_f67a9645-3a8a-41e7-b088-6802b0384b84\\\\n2025-12-05T12:25:45+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_f67a9645-3a8a-41e7-b088-6802b0384b84 to /host/opt/cni/bin/\\\\n2025-12-05T12:25:45Z [verbose] multus-daemon started\\\\n2025-12-05T12:25:45Z [verbose] Readiness Indicator file check\\\\n2025-12-05T12:26:30Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:26:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rpk8f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g5gv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.224808 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"291f2a35-7dd5-4af9-87f0-caae4ef75c66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dff23bc7ad1c9be48890b095dd871b43c01624a3fd5333527671348f2f6621bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dff23bc7ad1c9be48890b095dd871b43c01624a3fd5333527671348f2f6621bf\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:26:14Z\\\",\\\"message\\\":\\\"g-cert-secret-name:serving-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc00774da5f \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: service-ca-operator,},ClusterIP:10.217.4.40,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.40],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1205 12:26:13.843935 6427 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1205 12:26:13.843976 6427 lb_config.go:1031] Cluster endpoints for openshift-service-ca-operator/metrics for network=default are: map[]\\\\nF1205 12:26:13.844007 6427 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:26:13Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-fxbpl_openshift-ovn-kubernetes(291f2a35-7dd5-4af9-87f0-caae4ef75c66)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fxbpl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:41Z is after 2025-08-24T17:21:41Z"
Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.237532 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dzwxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f8364fb-1be8-4baa-aff0-10d4a4e8d614\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f0d98ad3b8357dc9296635ef627f67300f4bbdd8131491552ba43830d1e658\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2xfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dzwxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:41Z is after 2025-08-24T17:21:41Z"
Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.248992 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.249037 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.249077 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.249096 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.249109 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:41Z","lastTransitionTime":"2025-12-05T12:26:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.350686 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.350720 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.350728 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.350741 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.350751 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:41Z","lastTransitionTime":"2025-12-05T12:26:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.453763 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.453806 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.453817 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.453833 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.453847 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:41Z","lastTransitionTime":"2025-12-05T12:26:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.509319 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fxbpl_291f2a35-7dd5-4af9-87f0-caae4ef75c66/ovnkube-controller/2.log"
Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.511587 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" event={"ID":"291f2a35-7dd5-4af9-87f0-caae4ef75c66","Type":"ContainerStarted","Data":"e1b0328193847e770f1dc35e11b6fa43f0f7a15326972cda4b409ca656d30566"}
Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.512059 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl"
Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.524294 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba1e93b1bb1f04a0c36b81190dc5ba99e09cff796f519b55340c78d2d6305a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:41Z is after 2025-08-24T17:21:41Z"
Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.536141 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:41Z is after 2025-08-24T17:21:41Z"
Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.548221 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g5gv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759cb09f-42c3-4254-82f8-b5285b61012a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58740f457dc5aa8e984e28676df457a65c76e6be7ac9f64348a9d7a25246f8bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c44657f4f0a70fc2d2f1dcc08f1ca23c5fe94e77d3dbdd1bbb3f36f6471a62d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:26:30Z\\\",\\\"message\\\":\\\"2025-12-05T12:25:45+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_f67a9645-3a8a-41e7-b088-6802b0384b84\\\\n2025-12-05T12:25:45+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_f67a9645-3a8a-41e7-b088-6802b0384b84 to /host/opt/cni/bin/\\\\n2025-12-05T12:25:45Z [verbose] multus-daemon started\\\\n2025-12-05T12:25:45Z [verbose] Readiness Indicator file check\\\\n2025-12-05T12:26:30Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:26:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rpk8f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g5gv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:41Z is after 2025-08-24T17:21:41Z"
Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.555731 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.555770 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.555779 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.555793 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.555803 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:41Z","lastTransitionTime":"2025-12-05T12:26:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.569616 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"291f2a35-7dd5-4af9-87f0-caae4ef75c66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1b0328193847e770f1dc35e11b6fa43f0f7a15326972cda4b409ca656d30566\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dff23bc7ad1c9be48890b095dd871b43c01624a3fd5333527671348f2f6621bf\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:26:14Z\\\",\\\"message\\\":\\\"g-cert-secret-name:serving-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc00774da5f \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: service-ca-operator,},ClusterIP:10.217.4.40,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.40],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1205 12:26:13.843935 6427 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1205 12:26:13.843976 6427 lb_config.go:1031] Cluster endpoints for openshift-service-ca-operator/metrics for network=default are: map[]\\\\nF1205 12:26:13.844007 6427 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:26:13Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:26:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fxbpl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:41Z is after 2025-08-24T17:21:41Z"
Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.581406 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dzwxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f8364fb-1be8-4baa-aff0-10d4a4e8d614\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f0d98ad3b8357dc9296635ef627f67300f4bbdd8131491552ba43830d1e658\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2xfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dzwxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:41Z is after 2025-08-24T17:21:41Z"
Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.593582 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ln9ct" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"531c2cfd-8b93-4ec4-88ab-fb4e40de2543\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmtkj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmtkj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:58Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ln9ct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:41Z is after 2025-08-24T17:21:41Z"
Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.607307 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 12:25:38.416897 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 12:25:38.417058 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:25:38.422141 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1079216912/tls.crt::/tmp/serving-cert-1079216912/tls.key\\\\\\\"\\\\nI1205 12:25:38.771082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:25:38.774729 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:25:38.774756 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:25:38.774788 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:25:38.774811 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:25:38.780280 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:25:38.780316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780321 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:25:38.780329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:25:38.780332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:25:38.780335 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:25:38.780478 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:25:38.783020 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:41Z is after 2025-08-24T17:21:41Z"
Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.619572 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a95e925e1cf25326e556267b9b43fcff4b12eafe8aeb03b577a01ed8f0d98ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:41Z is after 2025-08-24T17:21:41Z"
Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.633551 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xpw77" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"beba5d67-ad2e-4968-91da-3f451dd2cdc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52618f95fb8efa7ff3baa650dcf8f3373b7d17bc27138fa03e49c68792adf36c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjrpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xpw77\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:41Z is after 2025-08-24T17:21:41Z"
Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.643393 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be412f31-7a36-4811-8914-be8cdc987d08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76ddca8a6cc5af5e792bf0f50c2bc3cfcec5ea40705126799786373ad5d18e8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45f31312230c7d56492145f94ee5dfda841a79b7d35cf4139b4fc09eacf070fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sx8lm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:41Z is after 2025-08-24T17:21:41Z"
Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.653464 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d237806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:41Z is after 2025-08-24T17:21:41Z"
Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.657757 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.657792 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.657800 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.657815 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.657825 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:41Z","lastTransitionTime":"2025-12-05T12:26:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.663883 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f51b46aa-f46b-4de1-bbbc-b23acb571394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3edce4b8b954f99066449c8dd5566695f300fc65c85ba3c90827ea380c87cf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6534b0ccbe3164a1bab663c746b201b5aef84ca859991c89cc3a7a84693f24a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a248634eb57b9ae01ffea846c57482ea060fcbd6281c5404bf96ab0706ee3f2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd6a4685a8454beb00ca78c095040c255785f695c1645ce1142a835f8d01fac9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd6a4685a8454beb00ca78c095040c255785f695c1645ce1142a835f8d01fac9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.675171 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.687873 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf86c4db02b1091f887fc7dc73c7e2e7bb8d03e7212fb8bd443a43c4f51e976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87cb87114139d85aeacf41902c2c61f7ccb3cef1f22892820a521390c2a64d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.698913 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.711326 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0851219b-f3dd-4229-b5c5-b4d86a452bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f040ccf64d679f2035148f8391ebcf3fb0e262970a60355616a2d51c5114ddf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7t7rf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2386591cf338cbe93dd94038583c47b3f3e56762d52bf6e99de5eb8e08da02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\
\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7t7rf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pzsqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.721268 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4ccc799-05c4-447c-b61d-e475213628f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d17f8c60696435050e9a5da93f54ee6a49b87142b36eb925b68197d10b3a935d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://906b2cfdbacb9adc9b4cac9f3f1845c45fbaf3a0871e50cf3725dc9bebd85631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\
\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://906b2cfdbacb9adc9b4cac9f3f1845c45fbaf3a0871e50cf3725dc9bebd85631\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.733980 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b4aeb18e84f1b440528e547960f9aed0bf44e53a870af6b06bc85f754e1c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\
\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vpljs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:41Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.760586 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.760621 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.760629 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.760642 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.760651 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:41Z","lastTransitionTime":"2025-12-05T12:26:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.862689 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.862731 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.862740 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.862754 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.862763 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:41Z","lastTransitionTime":"2025-12-05T12:26:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.965335 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.965381 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.965392 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.965411 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.965422 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:41Z","lastTransitionTime":"2025-12-05T12:26:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:41 crc kubenswrapper[4784]: I1205 12:26:41.998148 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:26:41 crc kubenswrapper[4784]: E1205 12:26:41.998309 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.068284 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.068338 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.068355 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.068377 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.068395 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:42Z","lastTransitionTime":"2025-12-05T12:26:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.171214 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.171264 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.171288 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.171319 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.171335 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:42Z","lastTransitionTime":"2025-12-05T12:26:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.273511 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.273568 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.273594 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.273616 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.273631 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:42Z","lastTransitionTime":"2025-12-05T12:26:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.375669 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.375720 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.375730 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.375744 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.375752 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:42Z","lastTransitionTime":"2025-12-05T12:26:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.478114 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.478155 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.478170 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.478262 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.478274 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:42Z","lastTransitionTime":"2025-12-05T12:26:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.517114 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fxbpl_291f2a35-7dd5-4af9-87f0-caae4ef75c66/ovnkube-controller/3.log" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.517780 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fxbpl_291f2a35-7dd5-4af9-87f0-caae4ef75c66/ovnkube-controller/2.log" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.520582 4784 generic.go:334] "Generic (PLEG): container finished" podID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerID="e1b0328193847e770f1dc35e11b6fa43f0f7a15326972cda4b409ca656d30566" exitCode=1 Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.520633 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" event={"ID":"291f2a35-7dd5-4af9-87f0-caae4ef75c66","Type":"ContainerDied","Data":"e1b0328193847e770f1dc35e11b6fa43f0f7a15326972cda4b409ca656d30566"} Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.520667 4784 scope.go:117] "RemoveContainer" containerID="dff23bc7ad1c9be48890b095dd871b43c01624a3fd5333527671348f2f6621bf" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.521467 4784 scope.go:117] "RemoveContainer" containerID="e1b0328193847e770f1dc35e11b6fa43f0f7a15326972cda4b409ca656d30566" Dec 05 12:26:42 crc kubenswrapper[4784]: E1205 12:26:42.521642 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-fxbpl_openshift-ovn-kubernetes(291f2a35-7dd5-4af9-87f0-caae4ef75c66)\"" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.532847 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba1e93b1bb1f04a0c36b81190dc5ba99e09cff796f519b55340c78d2d6305a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.544970 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.555133 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g5gv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759cb09f-42c3-4254-82f8-b5285b61012a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58740f457dc5aa8e984e28676df457a65c76e6be7ac9f64348a9d7a25246f8bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c44657f4f0a70fc2d2f1dcc08f1ca23c5fe94e77d3dbdd1bbb3f36f6471a62d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:26:30Z\\\",\\\"message\\\":\\\"2025-12-05T12:25:45+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_f67a9645-3a8a-41e7-b088-6802b0384b84\\\\n2025-12-05T12:25:45+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_f67a9645-3a8a-41e7-b088-6802b0384b84 to /host/opt/cni/bin/\\\\n2025-12-05T12:25:45Z [verbose] multus-daemon started\\\\n2025-12-05T12:25:45Z [verbose] Readiness Indicator file check\\\\n2025-12-05T12:26:30Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:26:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rpk8f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g5gv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.571938 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"291f2a35-7dd5-4af9-87f0-caae4ef75c66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1b0328193847e770f1dc35e11b6fa43f0f7a15326972cda4b409ca656d30566\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dff23bc7ad1c9be48890b095dd871b43c01624a3fd5333527671348f2f6621bf\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:26:14Z\\\",\\\"message\\\":\\\"g-cert-secret-name:serving-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc00774da5f \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: service-ca-operator,},ClusterIP:10.217.4.40,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.40],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1205 12:26:13.843935 6427 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1205 12:26:13.843976 6427 lb_config.go:1031] Cluster endpoints for openshift-service-ca-operator/metrics for network=default are: map[]\\\\nF1205 12:26:13.844007 6427 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:26:13Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1b0328193847e770f1dc35e11b6fa43f0f7a15326972cda4b409ca656d30566\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:26:42Z\\\",\\\"message\\\":\\\"s/network-check-source-55646444c4-trplf\\\\nI1205 12:26:41.841131 6823 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf in node crc\\\\nI1205 12:26:41.841166 6823 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1205 12:26:41.840956 6823 obj_retry.go:303] Retry object setup: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb\\\\nI1205 12:26:41.841211 6823 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb\\\\nI1205 12:26:41.841220 6823 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb in node crc\\\\nI1205 12:26:41.841226 6823 obj_retry.go:386] Retry 
successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb after 0 failed attempt(s)\\\\nI1205 12:26:41.841231 6823 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb\\\\nF1205 12:26:41.840784 6823 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:26:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/servicea
ccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fxbpl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.580530 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.580572 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.580581 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.580596 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.580605 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:42Z","lastTransitionTime":"2025-12-05T12:26:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.581684 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dzwxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f8364fb-1be8-4baa-aff0-10d4a4e8d614\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f0d98ad3b8357dc9296635ef627f67300f4bbdd8131491552ba43830d1e658\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2xfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dzwxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.591369 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ln9ct" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"531c2cfd-8b93-4ec4-88ab-fb4e40de2543\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmtkj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmtkj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:58Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ln9ct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.604572 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 12:25:38.416897 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 12:25:38.417058 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:25:38.422141 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1079216912/tls.crt::/tmp/serving-cert-1079216912/tls.key\\\\\\\"\\\\nI1205 12:25:38.771082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:25:38.774729 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:25:38.774756 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:25:38.774788 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:25:38.774811 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:25:38.780280 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:25:38.780316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780321 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:25:38.780329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:25:38.780332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:25:38.780335 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:25:38.780478 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:25:38.783020 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.616639 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a95e925e1cf25326e556267b9b43fcff4b12eafe8aeb03b577a01ed8f0d98ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.626580 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xpw77" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"beba5d67-ad2e-4968-91da-3f451dd2cdc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52618f95fb8efa7ff3baa650dcf8f3373b7d17bc27138fa03e49c68792adf36c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjrpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xpw77\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.637123 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be412f31-7a36-4811-8914-be8cdc987d08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76ddca8a6cc5af5e792bf0f50c2bc3cfcec5ea40705126799786373ad5d18e8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45f31312230c7d56492145f94ee5dfda841a79b7d35cf4139b4fc09eacf070fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sx8lm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.649168 4784 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d23
7806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.659944 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f51b46aa-f46b-4de1-bbbc-b23acb571394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3edce4b8b954f99066449c8dd5566695f300fc65c85ba3c90827ea380c87cf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6534b0ccbe3164a1bab663c746b201b5aef84ca859991c89cc3a7a84693f24a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c
97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a248634eb57b9ae01ffea846c57482ea060fcbd6281c5404bf96ab0706ee3f2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd6a4685a8454beb00ca78c095040c255785f695c1645ce1142a835f8d01fac9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd6a4685a8454beb00ca78c095040c255785f695c1645ce1142a835f8d01fac9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.671765 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.682420 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.682462 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.682472 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.682487 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.682497 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:42Z","lastTransitionTime":"2025-12-05T12:26:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.684321 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf86c4db02b1091f887fc7dc73c7e2e7bb8d03e7212fb8bd443a43c4f51e976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87cb87114139d85aeacf41902c2c61f7ccb3cef1f22892820a521390c2a64d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.695271 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.705466 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0851219b-f3dd-4229-b5c5-b4d86a452bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f040ccf64d679f2035148f8391ebcf3fb0e262970a60355616a2d51c5114ddf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7t7rf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2386591cf338cbe93dd94038583c47b3f3e56762d52bf6e99de5eb8e08da02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7t7rf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pzsqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:42Z is after 2025-08-24T17:21:41Z" Dec 05 
12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.713633 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4ccc799-05c4-447c-b61d-e475213628f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d17f8c60696435050e9a5da93f54ee6a49b87142b36eb925b68197d10b3a935d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://906b2cfdbacb9adc9b4cac9f3f1845c45fbaf3a0871e50cf3725dc9bebd85631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://906b2cfdbacb9adc9b4cac9f3f1845c45fbaf3a0871e50cf3725dc9bebd85631\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.727136 4784 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b4aeb18e84f1b440528e547960f9aed0bf44e53a870af6b06bc85f754e1c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\
\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vpljs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:42Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.785065 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.785116 4784 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.785133 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.785154 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.785173 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:42Z","lastTransitionTime":"2025-12-05T12:26:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.836838 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:26:42 crc kubenswrapper[4784]: E1205 12:26:42.836993 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:27:46.836964241 +0000 UTC m=+146.257031096 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.887672 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.887735 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.887748 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.887763 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.887773 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:42Z","lastTransitionTime":"2025-12-05T12:26:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.937857 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.937961 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.938004 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:26:42 crc kubenswrapper[4784]: E1205 12:26:42.938021 4784 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 12:26:42 crc kubenswrapper[4784]: E1205 12:26:42.938052 4784 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 12:26:42 crc kubenswrapper[4784]: E1205 12:26:42.938073 4784 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.938039 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:26:42 crc kubenswrapper[4784]: E1205 12:26:42.938083 4784 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 12:26:42 crc kubenswrapper[4784]: E1205 12:26:42.938125 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 12:27:46.938106447 +0000 UTC m=+146.358173262 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:26:42 crc kubenswrapper[4784]: E1205 12:26:42.938144 4784 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 12:26:42 crc kubenswrapper[4784]: E1205 12:26:42.938205 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 12:27:46.938166529 +0000 UTC m=+146.358233414 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 12:26:42 crc kubenswrapper[4784]: E1205 12:26:42.938241 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 12:27:46.938232671 +0000 UTC m=+146.358299586 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 12:26:42 crc kubenswrapper[4784]: E1205 12:26:42.938802 4784 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 12:26:42 crc kubenswrapper[4784]: E1205 12:26:42.938836 4784 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 12:26:42 crc kubenswrapper[4784]: E1205 12:26:42.938850 4784 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:26:42 crc kubenswrapper[4784]: E1205 12:26:42.938908 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 12:27:46.938890843 +0000 UTC m=+146.358957668 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.991119 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.991166 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.991180 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.991220 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.991236 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:42Z","lastTransitionTime":"2025-12-05T12:26:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.999021 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.999123 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:26:42 crc kubenswrapper[4784]: E1205 12:26:42.999457 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:26:42 crc kubenswrapper[4784]: I1205 12:26:42.999602 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:26:43 crc kubenswrapper[4784]: E1205 12:26:43.000106 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543" Dec 05 12:26:43 crc kubenswrapper[4784]: E1205 12:26:43.001064 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.093749 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.093784 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.093794 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.093806 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.093815 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:43Z","lastTransitionTime":"2025-12-05T12:26:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.197235 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.197297 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.197308 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.197329 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.197343 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:43Z","lastTransitionTime":"2025-12-05T12:26:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.300006 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.300051 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.300062 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.300080 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.300090 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:43Z","lastTransitionTime":"2025-12-05T12:26:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.402966 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.403000 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.403009 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.403023 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.403032 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:43Z","lastTransitionTime":"2025-12-05T12:26:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.483975 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.484025 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.484037 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.484054 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.484065 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:43Z","lastTransitionTime":"2025-12-05T12:26:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:43 crc kubenswrapper[4784]: E1205 12:26:43.496494 4784 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"08035136-431a-41d0-879c-bf86d5af7e54\\\",\\\"systemUUID\\\":\\\"aac4a951-c40f-4b5f-a660-6c137757957c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:43Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.500556 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.500586 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.500596 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.500612 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.500623 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:43Z","lastTransitionTime":"2025-12-05T12:26:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:43 crc kubenswrapper[4784]: E1205 12:26:43.513225 4784 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"08035136-431a-41d0-879c-bf86d5af7e54\\\",\\\"systemUUID\\\":\\\"aac4a951-c40f-4b5f-a660-6c137757957c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:43Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.516693 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.516743 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.516754 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.516858 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.516875 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:43Z","lastTransitionTime":"2025-12-05T12:26:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.525407 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fxbpl_291f2a35-7dd5-4af9-87f0-caae4ef75c66/ovnkube-controller/3.log" Dec 05 12:26:43 crc kubenswrapper[4784]: E1205 12:26:43.529275 4784 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"08035136-431a-41d0-879c-bf86d5af7e54\\\",\\\"systemUUID\\\":\\\"aac4a951-c40f-4b5f-a660-6c137757957c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:43Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.529972 4784 scope.go:117] "RemoveContainer" containerID="e1b0328193847e770f1dc35e11b6fa43f0f7a15326972cda4b409ca656d30566" Dec 05 12:26:43 crc kubenswrapper[4784]: E1205 12:26:43.530130 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-fxbpl_openshift-ovn-kubernetes(291f2a35-7dd5-4af9-87f0-caae4ef75c66)\"" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.532700 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.532739 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.532753 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.532769 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.532780 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:43Z","lastTransitionTime":"2025-12-05T12:26:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.541773 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f51b46aa-f46b-4de1-bbbc-b23acb571394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3edce4b8b954f99066449c8dd5566695f300fc65c85ba3c90827ea380c87cf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6534b0ccbe3164a1bab663c746b201b5aef84ca859991c89cc3a7a84693f24a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b88
1c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a248634eb57b9ae01ffea846c57482ea060fcbd6281c5404bf96ab0706ee3f2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd6a4685a8454beb00ca78c095040c255785f695c1645ce1142a835f8d01fac9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd6a4685a8454beb00ca78c095040c255785f695c1645ce1142a835f8d01fac9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:43Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:43 crc kubenswrapper[4784]: E1205 12:26:43.544285 4784 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"08035136-431a-41d0-879c-bf86d5af7e54\\\",\\\"systemUUID\\\":\\\"aac4a951-c40f-4b5f-a660-6c137757957c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:43Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.569245 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:43Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.571786 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.571819 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.571829 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.571845 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.571855 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:43Z","lastTransitionTime":"2025-12-05T12:26:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:43 crc kubenswrapper[4784]: E1205 12:26:43.590049 4784 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"08035136-431a-41d0-879c-bf86d5af7e54\\\",\\\"systemUUID\\\":\\\"aac4a951-c40f-4b5f-a660-6c137757957c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:43Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:43 crc kubenswrapper[4784]: E1205 12:26:43.590243 4784 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.591688 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.591727 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.591740 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.591757 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.591767 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:43Z","lastTransitionTime":"2025-12-05T12:26:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.593923 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf86c4db02b1091f887fc7dc73c7e2e7bb8d03e7212fb8bd443a43c4f51e976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87cb87114139d85aeacf41902c2c61f7ccb3cef1f22892820a521390c2a64d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"nam
e\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:43Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.608643 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:43Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.621175 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0851219b-f3dd-4229-b5c5-b4d86a452bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f040ccf64d679f2035148f8391ebcf3fb0e262970a60355616a2d51c5114ddf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7t7rf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2386591cf338cbe93dd94038583c47b3f3e56762d52bf6e99de5eb8e08da02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\
\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7t7rf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pzsqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:43Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.633252 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restart
Count\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d237806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:43Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.648108 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b4aeb18e84f1b440528e547960f9aed0bf44e53a870af6b06bc85f754e1c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vpljs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:43Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.658228 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4ccc799-05c4-447c-b61d-e475213628f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d17f8c60696435050e9a5da93f54ee6a49b87142b36eb925b68197d10b3a935d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://906b2cfdbacb9adc9b4cac9f3f1845c45fbaf3a0871e50cf3725dc9bebd85631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://906b2cfdbacb9adc9b4cac9f3f1845c45fbaf3a0871e50cf3725dc9bebd85631\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:43Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.669567 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:43Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.681331 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g5gv5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"759cb09f-42c3-4254-82f8-b5285b61012a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58740f457dc5aa8e984e28676df457a65c76e6be7ac9f64348a9d7a25246f8bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c44657f4f0a70fc2d2f1dcc08f1ca23c5fe94e77d3dbdd1bbb3f36f6471a62d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:26:30Z\\\",\\\"message\\\":\\\"2025-12-05T12:25:45+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_f67a9645-3a8a-41e7-b088-6802b0384b84\\\\n2025-12-05T12:25:45+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_f67a9645-3a8a-41e7-b088-6802b0384b84 to /host/opt/cni/bin/\\\\n2025-12-05T12:25:45Z [verbose] multus-daemon started\\\\n2025-12-05T12:25:45Z [verbose] Readiness Indicator file check\\\\n2025-12-05T12:26:30Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:26:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rpk8f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g5gv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:43Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.693818 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.693851 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.693862 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.693878 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.693890 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:43Z","lastTransitionTime":"2025-12-05T12:26:43Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.701017 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"291f2a35-7dd5-4af9-87f0-caae4ef75c66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/s
ecrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1b0328193847e770f1dc35e11b6fa43f0f7a15326972cda4b409ca656d30566\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1b0328193847e770f1dc35e11b6fa43f0f7a15326972cda4b409ca656d30566\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:26:42Z\\\",\\\"message\\\":\\\"s/network-check-source-55646444c4-trplf\\\\nI1205 12:26:41.841131 6823 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf in node crc\\\\nI1205 12:26:41.841166 6823 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1205 12:26:41.840956 6823 obj_retry.go:303] Retry object setup: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb\\\\nI1205 12:26:41.841211 6823 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb\\\\nI1205 12:26:41.841220 6823 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb in node crc\\\\nI1205 12:26:41.841226 6823 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb after 0 failed attempt(s)\\\\nI1205 12:26:41.841231 6823 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb\\\\nF1205 12:26:41.840784 6823 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:26:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-fxbpl_openshift-ovn-kubernetes(291f2a35-7dd5-4af9-87f0-caae4ef75c66)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fxbpl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:43Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.710901 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dzwxp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f8364fb-1be8-4baa-aff0-10d4a4e8d614\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f0d98ad3b8357dc9296635ef627f67300f4bbdd8131491552ba43830d1e658\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2xfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dzwxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:43Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.720634 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ln9ct" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"531c2cfd-8b93-4ec4-88ab-fb4e40de2543\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmtkj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmtkj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:58Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ln9ct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:43Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.732280 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba1e93b1bb1f04a0c36b81190dc5ba99e09cff796f519b55340c78d2d6305a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:43Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.746275 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 12:25:38.416897 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 12:25:38.417058 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:25:38.422141 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1079216912/tls.crt::/tmp/serving-cert-1079216912/tls.key\\\\\\\"\\\\nI1205 12:25:38.771082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:25:38.774729 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:25:38.774756 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:25:38.774788 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:25:38.774811 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:25:38.780280 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:25:38.780316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780321 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:25:38.780329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:25:38.780332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:25:38.780335 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:25:38.780478 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:25:38.783020 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:43Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.755474 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a95e925e1cf25326e556267b9b43fcff4b12eafe8aeb03b577a01ed8f0d98ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:43Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.762936 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xpw77" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"beba5d67-ad2e-4968-91da-3f451dd2cdc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52618f95fb8efa7ff3baa650dcf8f3373b7d17bc27138fa03e49c68792adf36c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjrpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xpw77\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:43Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.771394 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be412f31-7a36-4811-8914-be8cdc987d08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76ddca8a6cc5af5e792bf0f50c2bc3cfcec5ea40705126799786373ad5d18e8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45f31312230c7d56492145f94ee5dfda841a79b7d35cf4139b4fc09eacf070fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sx8lm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:43Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.796401 4784 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.796458 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.796476 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.796500 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.796519 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:43Z","lastTransitionTime":"2025-12-05T12:26:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.899420 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.899489 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.899507 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.899531 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.899549 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:43Z","lastTransitionTime":"2025-12-05T12:26:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:43 crc kubenswrapper[4784]: I1205 12:26:43.998048 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:26:43 crc kubenswrapper[4784]: E1205 12:26:43.998256 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.001900 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.001986 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.002009 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.002049 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.002071 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:44Z","lastTransitionTime":"2025-12-05T12:26:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.104714 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.104767 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.104779 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.104797 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.104807 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:44Z","lastTransitionTime":"2025-12-05T12:26:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.207246 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.207316 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.207339 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.207369 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.207391 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:44Z","lastTransitionTime":"2025-12-05T12:26:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.310094 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.310169 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.310221 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.310254 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.310275 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:44Z","lastTransitionTime":"2025-12-05T12:26:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.413357 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.413427 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.413444 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.413469 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.413488 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:44Z","lastTransitionTime":"2025-12-05T12:26:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.517499 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.517560 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.517578 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.517601 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.517618 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:44Z","lastTransitionTime":"2025-12-05T12:26:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.620014 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.620086 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.620106 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.620522 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.620568 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:44Z","lastTransitionTime":"2025-12-05T12:26:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.724099 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.724154 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.724162 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.724178 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.724205 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:44Z","lastTransitionTime":"2025-12-05T12:26:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.831080 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.831157 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.831168 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.831199 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.831229 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:44Z","lastTransitionTime":"2025-12-05T12:26:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.933546 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.933583 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.933593 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.933611 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.933621 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:44Z","lastTransitionTime":"2025-12-05T12:26:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.998704 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.998728 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:26:44 crc kubenswrapper[4784]: E1205 12:26:44.998839 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:26:44 crc kubenswrapper[4784]: I1205 12:26:44.999059 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:26:44 crc kubenswrapper[4784]: E1205 12:26:44.999129 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:26:44 crc kubenswrapper[4784]: E1205 12:26:44.999420 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543" Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.036506 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.036561 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.036574 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.036595 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.036609 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:45Z","lastTransitionTime":"2025-12-05T12:26:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.139308 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.139378 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.139403 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.139436 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.139463 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:45Z","lastTransitionTime":"2025-12-05T12:26:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
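[editor note] Every "network not ready" record here points at the same root cause: /etc/kubernetes/cni/net.d/ contains no CNI configuration. For orientation only, a config file of the kind the runtime expects in that directory might look like the sketch below; this uses the reference bridge/host-local plugins and is not the OVN-Kubernetes configuration that a CRC cluster's network operator would normally write there once it is up.

    {
      "cniVersion": "0.4.0",
      "name": "example-net",
      "type": "bridge",
      "bridge": "cni0",
      "ipam": {
        "type": "host-local",
        "subnet": "10.88.0.0/16"
      }
    }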
Has your network provider started?"} Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.242130 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.242208 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.242227 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.242249 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.242264 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:45Z","lastTransitionTime":"2025-12-05T12:26:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.345641 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.345721 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.345733 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.345749 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.345761 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:45Z","lastTransitionTime":"2025-12-05T12:26:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.448304 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.448335 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.448342 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.448356 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.448366 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:45Z","lastTransitionTime":"2025-12-05T12:26:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.550716 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.550773 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.550782 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.550797 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.550806 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:45Z","lastTransitionTime":"2025-12-05T12:26:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.653652 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.653691 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.653702 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.653718 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.653730 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:45Z","lastTransitionTime":"2025-12-05T12:26:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.756128 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.756181 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.756219 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.756264 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.756282 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:45Z","lastTransitionTime":"2025-12-05T12:26:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.858554 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.858608 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.858620 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.858638 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.858650 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:45Z","lastTransitionTime":"2025-12-05T12:26:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.961902 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.962310 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.962324 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.962371 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.962386 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:45Z","lastTransitionTime":"2025-12-05T12:26:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:45 crc kubenswrapper[4784]: I1205 12:26:45.998584 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:26:45 crc kubenswrapper[4784]: E1205 12:26:45.998758 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.065055 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.065109 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.065128 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.065153 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.065172 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:46Z","lastTransitionTime":"2025-12-05T12:26:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.167283 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.167361 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.167386 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.167419 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.167444 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:46Z","lastTransitionTime":"2025-12-05T12:26:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.270135 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.270210 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.270220 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.270238 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.270248 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:46Z","lastTransitionTime":"2025-12-05T12:26:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.372357 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.372397 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.372406 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.372425 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.372437 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:46Z","lastTransitionTime":"2025-12-05T12:26:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.475104 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.475138 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.475146 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.475174 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.475200 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:46Z","lastTransitionTime":"2025-12-05T12:26:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.578227 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.578620 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.578770 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.578915 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.579062 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:46Z","lastTransitionTime":"2025-12-05T12:26:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.681403 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.681677 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.681862 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.681968 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.682055 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:46Z","lastTransitionTime":"2025-12-05T12:26:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.784752 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.784826 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.784845 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.784870 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.784890 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:46Z","lastTransitionTime":"2025-12-05T12:26:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.888099 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.888147 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.888158 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.888174 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.888237 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:46Z","lastTransitionTime":"2025-12-05T12:26:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.991629 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.992133 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.992359 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.992412 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.992437 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:46Z","lastTransitionTime":"2025-12-05T12:26:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.998541 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.998593 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:26:46 crc kubenswrapper[4784]: E1205 12:26:46.998689 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:26:46 crc kubenswrapper[4784]: E1205 12:26:46.998859 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:26:46 crc kubenswrapper[4784]: I1205 12:26:46.999047 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:26:46 crc kubenswrapper[4784]: E1205 12:26:46.999256 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543" Dec 05 12:26:47 crc kubenswrapper[4784]: I1205 12:26:47.095752 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:47 crc kubenswrapper[4784]: I1205 12:26:47.095818 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:47 crc kubenswrapper[4784]: I1205 12:26:47.095834 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:47 crc kubenswrapper[4784]: I1205 12:26:47.095858 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:47 crc kubenswrapper[4784]: I1205 12:26:47.095875 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:47Z","lastTransitionTime":"2025-12-05T12:26:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:47 crc kubenswrapper[4784]: I1205 12:26:47.198790 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:47 crc kubenswrapper[4784]: I1205 12:26:47.198836 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:47 crc kubenswrapper[4784]: I1205 12:26:47.198845 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:47 crc kubenswrapper[4784]: I1205 12:26:47.198858 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:47 crc kubenswrapper[4784]: I1205 12:26:47.198868 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:47Z","lastTransitionTime":"2025-12-05T12:26:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:47 crc kubenswrapper[4784]: I1205 12:26:47.301655 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:47 crc kubenswrapper[4784]: I1205 12:26:47.302478 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:47 crc kubenswrapper[4784]: I1205 12:26:47.302550 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:47 crc kubenswrapper[4784]: I1205 12:26:47.302618 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:47 crc kubenswrapper[4784]: I1205 12:26:47.302673 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:47Z","lastTransitionTime":"2025-12-05T12:26:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:47 crc kubenswrapper[4784]: I1205 12:26:47.405541 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:47 crc kubenswrapper[4784]: I1205 12:26:47.405967 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:47 crc kubenswrapper[4784]: I1205 12:26:47.406102 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:47 crc kubenswrapper[4784]: I1205 12:26:47.406259 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:47 crc kubenswrapper[4784]: I1205 12:26:47.406390 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:47Z","lastTransitionTime":"2025-12-05T12:26:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:47 crc kubenswrapper[4784]: I1205 12:26:47.508348 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:47 crc kubenswrapper[4784]: I1205 12:26:47.508403 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:47 crc kubenswrapper[4784]: I1205 12:26:47.508421 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:47 crc kubenswrapper[4784]: I1205 12:26:47.508442 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:47 crc kubenswrapper[4784]: I1205 12:26:47.508456 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:47Z","lastTransitionTime":"2025-12-05T12:26:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:47 crc kubenswrapper[4784]: I1205 12:26:47.611083 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:47 crc kubenswrapper[4784]: I1205 12:26:47.611437 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:47 crc kubenswrapper[4784]: I1205 12:26:47.611674 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:47 crc kubenswrapper[4784]: I1205 12:26:47.611925 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:47 crc kubenswrapper[4784]: I1205 12:26:47.612221 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:47Z","lastTransitionTime":"2025-12-05T12:26:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:47 crc kubenswrapper[4784]: I1205 12:26:47.715703 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:47 crc kubenswrapper[4784]: I1205 12:26:47.715913 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:47 crc kubenswrapper[4784]: I1205 12:26:47.715975 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:47 crc kubenswrapper[4784]: I1205 12:26:47.716081 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:47 crc kubenswrapper[4784]: I1205 12:26:47.716152 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:47Z","lastTransitionTime":"2025-12-05T12:26:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:47 crc kubenswrapper[4784]: I1205 12:26:47.819052 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:47 crc kubenswrapper[4784]: I1205 12:26:47.819109 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:47 crc kubenswrapper[4784]: I1205 12:26:47.819120 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:47 crc kubenswrapper[4784]: I1205 12:26:47.819137 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:47 crc kubenswrapper[4784]: I1205 12:26:47.819150 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:47Z","lastTransitionTime":"2025-12-05T12:26:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:47 crc kubenswrapper[4784]: I1205 12:26:47.922720 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:47 crc kubenswrapper[4784]: I1205 12:26:47.922783 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:47 crc kubenswrapper[4784]: I1205 12:26:47.922800 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:47 crc kubenswrapper[4784]: I1205 12:26:47.922822 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:47 crc kubenswrapper[4784]: I1205 12:26:47.922837 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:47Z","lastTransitionTime":"2025-12-05T12:26:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:47 crc kubenswrapper[4784]: I1205 12:26:47.998092 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:26:47 crc kubenswrapper[4784]: E1205 12:26:47.998338 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.025765 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.025831 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.025848 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.025874 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.025892 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:48Z","lastTransitionTime":"2025-12-05T12:26:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.128822 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.128858 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.128872 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.128888 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.128900 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:48Z","lastTransitionTime":"2025-12-05T12:26:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.231832 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.231876 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.231887 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.231904 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.231916 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:48Z","lastTransitionTime":"2025-12-05T12:26:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.335644 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.335737 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.335757 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.335785 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.335802 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:48Z","lastTransitionTime":"2025-12-05T12:26:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.438391 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.438443 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.438458 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.438479 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.438495 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:48Z","lastTransitionTime":"2025-12-05T12:26:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.541657 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.541727 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.541746 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.541771 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.541788 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:48Z","lastTransitionTime":"2025-12-05T12:26:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.643769 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.643809 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.643821 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.643839 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.643850 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:48Z","lastTransitionTime":"2025-12-05T12:26:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.746658 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.746699 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.746730 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.746743 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.746753 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:48Z","lastTransitionTime":"2025-12-05T12:26:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.848907 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.848945 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.848954 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.848968 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.848976 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:48Z","lastTransitionTime":"2025-12-05T12:26:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.952333 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.952410 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.952428 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.952458 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.952491 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:48Z","lastTransitionTime":"2025-12-05T12:26:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.997858 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:26:48 crc kubenswrapper[4784]: E1205 12:26:48.997994 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.998134 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:26:48 crc kubenswrapper[4784]: I1205 12:26:48.998127 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:26:48 crc kubenswrapper[4784]: E1205 12:26:48.998379 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:26:48 crc kubenswrapper[4784]: E1205 12:26:48.998493 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543" Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.055549 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.055599 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.055611 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.055627 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.055638 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:49Z","lastTransitionTime":"2025-12-05T12:26:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.158598 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.158641 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.158653 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.158669 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.158683 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:49Z","lastTransitionTime":"2025-12-05T12:26:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.261463 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.262258 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.262284 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.262309 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.262321 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:49Z","lastTransitionTime":"2025-12-05T12:26:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.364160 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.364449 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.364543 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.364716 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.364781 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:49Z","lastTransitionTime":"2025-12-05T12:26:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.467299 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.467336 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.467343 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.467356 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.467367 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:49Z","lastTransitionTime":"2025-12-05T12:26:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.570231 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.570266 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.570275 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.570289 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.570299 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:49Z","lastTransitionTime":"2025-12-05T12:26:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.672588 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.672639 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.672651 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.672670 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.672682 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:49Z","lastTransitionTime":"2025-12-05T12:26:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.775857 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.775968 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.775983 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.775999 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.776036 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:49Z","lastTransitionTime":"2025-12-05T12:26:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.878266 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.878302 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.878311 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.878327 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.878337 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:49Z","lastTransitionTime":"2025-12-05T12:26:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.981055 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.981106 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.981127 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.981147 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.981226 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:49Z","lastTransitionTime":"2025-12-05T12:26:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:49 crc kubenswrapper[4784]: I1205 12:26:49.998646 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:26:49 crc kubenswrapper[4784]: E1205 12:26:49.998828 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.083914 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.083962 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.083975 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.083992 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.084001 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:50Z","lastTransitionTime":"2025-12-05T12:26:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.186845 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.186887 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.186897 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.186912 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.186923 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:50Z","lastTransitionTime":"2025-12-05T12:26:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.289152 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.289266 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.289282 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.289298 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.289311 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:50Z","lastTransitionTime":"2025-12-05T12:26:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.391543 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.391579 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.391586 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.391600 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.391609 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:50Z","lastTransitionTime":"2025-12-05T12:26:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.494375 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.494640 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.494712 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.494790 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.494854 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:50Z","lastTransitionTime":"2025-12-05T12:26:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.597911 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.598159 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.598322 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.598419 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.598495 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:50Z","lastTransitionTime":"2025-12-05T12:26:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.701638 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.701704 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.701722 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.701748 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.701765 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:50Z","lastTransitionTime":"2025-12-05T12:26:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.805141 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.805247 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.805271 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.805300 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.805325 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:50Z","lastTransitionTime":"2025-12-05T12:26:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.908011 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.908046 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.908056 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.908068 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.908077 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:50Z","lastTransitionTime":"2025-12-05T12:26:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.997858 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.998015 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:26:50 crc kubenswrapper[4784]: I1205 12:26:50.998058 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:26:50 crc kubenswrapper[4784]: E1205 12:26:50.998124 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:26:50 crc kubenswrapper[4784]: E1205 12:26:50.998302 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543" Dec 05 12:26:50 crc kubenswrapper[4784]: E1205 12:26:50.998429 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.009908 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.010261 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.010421 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.010516 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.010583 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:51Z","lastTransitionTime":"2025-12-05T12:26:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.010685 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4ccc799-05c4-447c-b61d-e475213628f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d17f8c60696435050e9a5da93f54ee6a49b87142b36eb925b68197d10b3a935d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://906b2cfdbacb9adc9b4cac9f3f1845c45fbaf3a0871e50cf3725dc9bebd85631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"ima
geID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://906b2cfdbacb9adc9b4cac9f3f1845c45fbaf3a0871e50cf3725dc9bebd85631\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:51Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.024774 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b4aeb18e84f1b440528e547960f9aed0bf44e53a870af6b06bc85f754e1c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"ima
geID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T1
2:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vpljs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:51Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.036054 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ln9ct" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"531c2cfd-8b93-4ec4-88ab-fb4e40de2543\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmtkj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmtkj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:58Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ln9ct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:51Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.049787 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba1e93b1bb1f04a0c36b81190dc5ba99e09cff796f519b55340c78d2d6305a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:51Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.064656 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:51Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.075841 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g5gv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759cb09f-42c3-4254-82f8-b5285b61012a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58740f457dc5aa8e984e28676df457a65c76e6be7ac9f64348a9d7a25246f8bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c44657f4f0a70fc2d2f1dcc08f1ca23c5fe94e77d3dbdd1bbb3f36f6471a62d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:26:30Z\\\",\\\"message\\\":\\\"2025-12-05T12:25:45+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_f67a9645-3a8a-41e7-b088-6802b0384b84\\\\n2025-12-05T12:25:45+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_f67a9645-3a8a-41e7-b088-6802b0384b84 to /host/opt/cni/bin/\\\\n2025-12-05T12:25:45Z [verbose] multus-daemon started\\\\n2025-12-05T12:25:45Z [verbose] Readiness Indicator file check\\\\n2025-12-05T12:26:30Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:26:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rpk8f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g5gv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:51Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.096540 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"291f2a35-7dd5-4af9-87f0-caae4ef75c66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1b0328193847e770f1dc35e11b6fa43f0f7a15326972cda4b409ca656d30566\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1b0328193847e770f1dc35e11b6fa43f0f7a15326972cda4b409ca656d30566\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:26:42Z\\\",\\\"message\\\":\\\"s/network-check-source-55646444c4-trplf\\\\nI1205 12:26:41.841131 6823 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf in node crc\\\\nI1205 12:26:41.841166 6823 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1205 12:26:41.840956 6823 obj_retry.go:303] Retry object setup: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb\\\\nI1205 12:26:41.841211 6823 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb\\\\nI1205 12:26:41.841220 6823 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb in node crc\\\\nI1205 12:26:41.841226 6823 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb after 0 failed attempt(s)\\\\nI1205 12:26:41.841231 6823 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb\\\\nF1205 12:26:41.840784 6823 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:26:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-fxbpl_openshift-ovn-kubernetes(291f2a35-7dd5-4af9-87f0-caae4ef75c66)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fxbpl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:51Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.107111 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dzwxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f8364fb-1be8-4baa-aff0-10d4a4e8d614\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f0d98ad3b8357dc9296635ef627f67300f4bbdd8131491552ba43830d1e658\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2xfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dzwxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:51Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.113273 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.113309 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.113327 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.113346 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.113367 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:51Z","lastTransitionTime":"2025-12-05T12:26:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.126302 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 12:25:38.416897 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 12:25:38.417058 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:25:38.422141 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1079216912/tls.crt::/tmp/serving-cert-1079216912/tls.key\\\\\\\"\\\\nI1205 12:25:38.771082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:25:38.774729 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:25:38.774756 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:25:38.774788 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:25:38.774811 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:25:38.780280 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:25:38.780316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780321 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:25:38.780329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:25:38.780332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:25:38.780335 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:25:38.780478 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:25:38.783020 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:51Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.138680 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a95e925e1cf25326e556267b9b43fcff4b12eafe8aeb03b577a01ed8f0d98ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:51Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.147979 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xpw77" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"beba5d67-ad2e-4968-91da-3f451dd2cdc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52618f95fb8efa7ff3baa650dcf8f3373b7d17bc27138fa03e49c68792adf36c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjrpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xpw77\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:51Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.158107 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be412f31-7a36-4811-8914-be8cdc987d08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76ddca8a6cc5af5e792bf0f50c2bc3cfcec5ea40705126799786373ad5d18e8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45f31312230c7d56492145f94ee5dfda841a79b7d35cf4139b4fc09eacf070fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sx8lm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:51Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.169648 4784 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0851219b-f3dd-4229-b5c5-b4d86a452bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f040ccf64d679f2035148f8391ebcf3fb0e262970a60355616a2d51c5114ddf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7t7rf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2386591cf338cbe93dd94038583c47b3f3e56762d52bf6e99de5eb8e08da02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7t7rf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pzsqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:51Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.181664 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubern
etes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d237806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:51Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.193294 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f51b46aa-f46b-4de1-bbbc-b23acb571394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3edce4b8b954f99066449c8dd5566695f300fc65c85ba3c90827ea380c87cf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6534b0ccbe3164a1bab663c746b201b5aef84ca859991c89cc3a7a84693f24a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a248634eb57b9ae01ffea846c57482ea060fcbd6281c5404bf96ab0706ee3f2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd6a4685a8454beb00ca78c095040c255785f695c1645ce1142a835f8d01fac9\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd6a4685a8454beb00ca78c095040c255785f695c1645ce1142a835f8d01fac9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:51Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.204498 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:51Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.215310 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.215347 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.215358 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.215373 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.215383 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:51Z","lastTransitionTime":"2025-12-05T12:26:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.217551 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf86c4db02b1091f887fc7dc73c7e2e7bb8d03e7212fb8bd443a43c4f51e976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87cb87114139d85aeacf41902c2c61f7ccb3cef1f22892820a521390c2a64d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:51Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.229857 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:51Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.317734 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.317767 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.317777 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.317791 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.317801 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:51Z","lastTransitionTime":"2025-12-05T12:26:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.419573 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.419615 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.419625 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.419639 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.419648 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:51Z","lastTransitionTime":"2025-12-05T12:26:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.521691 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.521740 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.521752 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.521769 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.521779 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:51Z","lastTransitionTime":"2025-12-05T12:26:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.624437 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.624494 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.624507 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.624528 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.624539 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:51Z","lastTransitionTime":"2025-12-05T12:26:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.726417 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.726475 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.726489 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.726508 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.726521 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:51Z","lastTransitionTime":"2025-12-05T12:26:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.829072 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.829143 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.829175 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.829267 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.829294 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:51Z","lastTransitionTime":"2025-12-05T12:26:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.932381 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.932432 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.932445 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.932462 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.932471 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:51Z","lastTransitionTime":"2025-12-05T12:26:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:51 crc kubenswrapper[4784]: I1205 12:26:51.998473 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:26:51 crc kubenswrapper[4784]: E1205 12:26:51.998610 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.034216 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.034268 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.034278 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.034293 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.034303 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:52Z","lastTransitionTime":"2025-12-05T12:26:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.136441 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.136482 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.136495 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.136512 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.136524 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:52Z","lastTransitionTime":"2025-12-05T12:26:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.238226 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.238263 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.238273 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.238288 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.238298 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:52Z","lastTransitionTime":"2025-12-05T12:26:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.340392 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.340440 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.340449 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.340464 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.340474 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:52Z","lastTransitionTime":"2025-12-05T12:26:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.442079 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.442111 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.442120 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.442134 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.442143 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:52Z","lastTransitionTime":"2025-12-05T12:26:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.544142 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.544513 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.544615 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.544912 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.545013 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:52Z","lastTransitionTime":"2025-12-05T12:26:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.648058 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.648103 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.648116 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.648144 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.648158 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:52Z","lastTransitionTime":"2025-12-05T12:26:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.750416 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.750681 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.750765 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.750846 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.750936 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:52Z","lastTransitionTime":"2025-12-05T12:26:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.853717 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.853754 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.853762 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.853779 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.853791 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:52Z","lastTransitionTime":"2025-12-05T12:26:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.956431 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.956470 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.956481 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.956498 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.956509 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:52Z","lastTransitionTime":"2025-12-05T12:26:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.998384 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.998467 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:26:52 crc kubenswrapper[4784]: I1205 12:26:52.998407 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:26:52 crc kubenswrapper[4784]: E1205 12:26:52.998566 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:26:52 crc kubenswrapper[4784]: E1205 12:26:52.998662 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543" Dec 05 12:26:52 crc kubenswrapper[4784]: E1205 12:26:52.998761 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.059871 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.059912 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.059923 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.059938 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.059949 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:53Z","lastTransitionTime":"2025-12-05T12:26:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.162595 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.162646 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.162665 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.162704 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.162735 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:53Z","lastTransitionTime":"2025-12-05T12:26:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.266071 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.266131 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.266147 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.266166 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.266182 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:53Z","lastTransitionTime":"2025-12-05T12:26:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.368783 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.368824 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.368834 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.368848 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.368857 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:53Z","lastTransitionTime":"2025-12-05T12:26:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.471815 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.471877 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.471890 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.471908 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.471924 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:53Z","lastTransitionTime":"2025-12-05T12:26:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.574482 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.574774 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.574852 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.574943 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.575013 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:53Z","lastTransitionTime":"2025-12-05T12:26:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.614129 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.614174 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.614197 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.614212 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.614223 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:53Z","lastTransitionTime":"2025-12-05T12:26:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:53 crc kubenswrapper[4784]: E1205 12:26:53.626395 4784 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"08035136-431a-41d0-879c-bf86d5af7e54\\\",\\\"systemUUID\\\":\\\"aac4a951-c40f-4b5f-a660-6c137757957c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:53Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.634008 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.634418 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.634531 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.634626 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.634689 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:53Z","lastTransitionTime":"2025-12-05T12:26:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:53 crc kubenswrapper[4784]: E1205 12:26:53.652945 4784 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"08035136-431a-41d0-879c-bf86d5af7e54\\\",\\\"systemUUID\\\":\\\"aac4a951-c40f-4b5f-a660-6c137757957c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:53Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.658618 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.659093 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
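The setters.go records above spell out why the node is stuck NotReady: the container runtime reports NetworkReady=false because no CNI configuration file exists yet under /etc/kubernetes/cni/net.d/. A minimal diagnostic sketch of that same check in Python follows; the directory path is taken from the log line itself, and the script is an operator-side illustration, not kubelet code:

    from pathlib import Path

    # Directory named in the kubelet message above; the network provider is
    # expected to drop a .conf/.conflist file here once it has started.
    CNI_CONF_DIR = Path("/etc/kubernetes/cni/net.d")

    def cni_configs() -> list[Path]:
        """Return the CNI config files a container runtime would pick up."""
        if not CNI_CONF_DIR.is_dir():
            return []
        return sorted(p for p in CNI_CONF_DIR.iterdir()
                      if p.suffix in (".conf", ".conflist", ".json"))

    if __name__ == "__main__":
        found = cni_configs()
        if found:
            print("CNI config present:", ", ".join(p.name for p in found))
        else:
            print("no CNI configuration file found; node will stay NotReady")

Until the network provider (presumably OVN-Kubernetes here, given the node.network-node-identity.openshift.io webhook) writes its configuration into that directory, these Ready=False records keep repeating.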
event="NodeHasNoDiskPressure" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.659356 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.659585 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.659750 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:53Z","lastTransitionTime":"2025-12-05T12:26:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:53 crc kubenswrapper[4784]: E1205 12:26:53.672208 4784 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"08035136-431a-41d0-879c-bf86d5af7e54\\\",\\\"systemUUID\\\":\\\"aac4a951-c40f-4b5f-a660-6c137757957c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:53Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.676793 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.676854 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.676871 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.676893 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.676910 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:53Z","lastTransitionTime":"2025-12-05T12:26:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:53 crc kubenswrapper[4784]: E1205 12:26:53.691306 4784 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"08035136-431a-41d0-879c-bf86d5af7e54\\\",\\\"systemUUID\\\":\\\"aac4a951-c40f-4b5f-a660-6c137757957c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:53Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.695507 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.695535 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.695553 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.695575 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.695592 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:53Z","lastTransitionTime":"2025-12-05T12:26:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:53 crc kubenswrapper[4784]: E1205 12:26:53.713322 4784 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:26:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"08035136-431a-41d0-879c-bf86d5af7e54\\\",\\\"systemUUID\\\":\\\"aac4a951-c40f-4b5f-a660-6c137757957c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:26:53Z is after 2025-08-24T17:21:41Z" Dec 05 12:26:53 crc kubenswrapper[4784]: E1205 12:26:53.713512 4784 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.715774 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
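The five consecutive patch failures above, capped by "update node status exceeds retry count", line up with the kubelet's fixed node-status retry budget (nodeStatusUpdateRetry, 5 attempts per sync). Every attempt fails for the same reason: the node.network-node-identity.openshift.io webhook at 127.0.0.1:9743 serves a TLS certificate that expired on 2025-08-24T17:21:41Z, months before the node's current clock of 2025-12-05. A minimal sketch for confirming that expiry from the node follows; it assumes the third-party cryptography package is installed and is purely diagnostic, not part of any OpenShift component:

    import ssl
    from datetime import datetime, timezone

    from cryptography import x509  # third-party; assumed available

    HOST, PORT = "127.0.0.1", 9743  # webhook endpoint taken from the error above

    # get_server_certificate() does not validate the chain, so it succeeds even
    # against an expired certificate -- exactly the condition this log reports.
    pem = ssl.get_server_certificate((HOST, PORT))
    cert = x509.load_pem_x509_certificate(pem.encode("ascii"))

    now = datetime.now(timezone.utc)
    not_after = cert.not_valid_after_utc  # cryptography >= 42; older releases: not_valid_after
    print(f"notAfter={not_after:%Y-%m-%dT%H:%M:%SZ}",
          "EXPIRED" if now > not_after else "valid")

Retrying cannot help here; the certificate has to be rotated (a familiar state for a CRC VM resumed long after its certificates expired), after which the status patches go through again.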
event="NodeHasSufficientMemory" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.715830 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.715843 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.715881 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.715896 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:53Z","lastTransitionTime":"2025-12-05T12:26:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.818950 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.819049 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.819067 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.819087 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.819102 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:53Z","lastTransitionTime":"2025-12-05T12:26:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.922507 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.922553 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.922565 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.922590 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.922615 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:53Z","lastTransitionTime":"2025-12-05T12:26:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:53 crc kubenswrapper[4784]: I1205 12:26:53.998331 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:26:53 crc kubenswrapper[4784]: E1205 12:26:53.998482 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:26:54 crc kubenswrapper[4784]: I1205 12:26:54.026384 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:54 crc kubenswrapper[4784]: I1205 12:26:54.026439 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:54 crc kubenswrapper[4784]: I1205 12:26:54.026457 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:54 crc kubenswrapper[4784]: I1205 12:26:54.026481 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:54 crc kubenswrapper[4784]: I1205 12:26:54.026499 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:54Z","lastTransitionTime":"2025-12-05T12:26:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:54 crc kubenswrapper[4784]: I1205 12:26:54.128177 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:54 crc kubenswrapper[4784]: I1205 12:26:54.128244 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:54 crc kubenswrapper[4784]: I1205 12:26:54.128257 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:54 crc kubenswrapper[4784]: I1205 12:26:54.128280 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:54 crc kubenswrapper[4784]: I1205 12:26:54.128294 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:54Z","lastTransitionTime":"2025-12-05T12:26:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 05 12:26:54 crc kubenswrapper[4784]: I1205 12:26:54.998314 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 12:26:54 crc kubenswrapper[4784]: I1205 12:26:54.998425 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct"
Dec 05 12:26:54 crc kubenswrapper[4784]: E1205 12:26:54.998500 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 12:26:54 crc kubenswrapper[4784]: I1205 12:26:54.998581 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 12:26:54 crc kubenswrapper[4784]: E1205 12:26:54.998642 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543"
Dec 05 12:26:54 crc kubenswrapper[4784]: E1205 12:26:54.998748 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 12:26:55 crc kubenswrapper[4784]: I1205 12:26:55.998356 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 12:26:55 crc kubenswrapper[4784]: E1205 12:26:55.998763 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 12:26:55 crc kubenswrapper[4784]: I1205 12:26:55.999047 4784 scope.go:117] "RemoveContainer" containerID="e1b0328193847e770f1dc35e11b6fa43f0f7a15326972cda4b409ca656d30566"
Dec 05 12:26:55 crc kubenswrapper[4784]: E1205 12:26:55.999248 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-fxbpl_openshift-ovn-kubernetes(291f2a35-7dd5-4af9-87f0-caae4ef75c66)\"" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66"
Dec 05 12:26:56 crc kubenswrapper[4784]: I1205 12:26:56.998902 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct"
Dec 05 12:26:56 crc kubenswrapper[4784]: I1205 12:26:56.998954 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 12:26:56 crc kubenswrapper[4784]: I1205 12:26:56.998955 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 12:26:56 crc kubenswrapper[4784]: E1205 12:26:56.999068 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543"
Dec 05 12:26:56 crc kubenswrapper[4784]: E1205 12:26:56.999116 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 12:26:56 crc kubenswrapper[4784]: E1205 12:26:56.999230 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 12:26:57 crc kubenswrapper[4784]: I1205 12:26:57.998366 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 12:26:57 crc kubenswrapper[4784]: E1205 12:26:57.998582 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Has your network provider started?"} Dec 05 12:26:58 crc kubenswrapper[4784]: I1205 12:26:58.864803 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:58 crc kubenswrapper[4784]: I1205 12:26:58.864866 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:58 crc kubenswrapper[4784]: I1205 12:26:58.864885 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:58 crc kubenswrapper[4784]: I1205 12:26:58.864910 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:58 crc kubenswrapper[4784]: I1205 12:26:58.864928 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:58Z","lastTransitionTime":"2025-12-05T12:26:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:58 crc kubenswrapper[4784]: I1205 12:26:58.966924 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:58 crc kubenswrapper[4784]: I1205 12:26:58.966972 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:58 crc kubenswrapper[4784]: I1205 12:26:58.966981 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:58 crc kubenswrapper[4784]: I1205 12:26:58.966996 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:58 crc kubenswrapper[4784]: I1205 12:26:58.967006 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:58Z","lastTransitionTime":"2025-12-05T12:26:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:58 crc kubenswrapper[4784]: I1205 12:26:58.998660 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:26:58 crc kubenswrapper[4784]: I1205 12:26:58.998719 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:26:58 crc kubenswrapper[4784]: I1205 12:26:58.998694 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:26:58 crc kubenswrapper[4784]: E1205 12:26:58.998885 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:26:58 crc kubenswrapper[4784]: E1205 12:26:58.998972 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543" Dec 05 12:26:58 crc kubenswrapper[4784]: E1205 12:26:58.999073 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.070532 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.070653 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.070663 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.070678 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.070687 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:59Z","lastTransitionTime":"2025-12-05T12:26:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.173308 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.173422 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.173472 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.173490 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.173501 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:59Z","lastTransitionTime":"2025-12-05T12:26:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.276546 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.276585 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.276594 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.276611 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.276621 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:59Z","lastTransitionTime":"2025-12-05T12:26:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.379698 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.379730 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.379738 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.379751 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.379761 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:59Z","lastTransitionTime":"2025-12-05T12:26:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.481871 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.481907 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.481948 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.481964 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.481972 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:59Z","lastTransitionTime":"2025-12-05T12:26:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.584845 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.584911 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.584926 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.584943 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.584972 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:59Z","lastTransitionTime":"2025-12-05T12:26:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.687235 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.687358 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.687374 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.687398 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.687414 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:59Z","lastTransitionTime":"2025-12-05T12:26:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.790226 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.790279 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.790295 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.790313 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.790334 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:59Z","lastTransitionTime":"2025-12-05T12:26:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.892955 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.892992 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.893000 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.893013 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.893023 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:59Z","lastTransitionTime":"2025-12-05T12:26:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.995301 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.995336 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.995344 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.995357 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.995366 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:26:59Z","lastTransitionTime":"2025-12-05T12:26:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:26:59 crc kubenswrapper[4784]: I1205 12:26:59.998718 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:26:59 crc kubenswrapper[4784]: E1205 12:26:59.998817 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.097770 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.097807 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.097815 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.097827 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.097837 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:27:00Z","lastTransitionTime":"2025-12-05T12:27:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.200980 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.201024 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.201032 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.201047 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.201056 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:27:00Z","lastTransitionTime":"2025-12-05T12:27:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.303039 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.303095 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.303104 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.303117 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.303126 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:27:00Z","lastTransitionTime":"2025-12-05T12:27:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.405401 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.405453 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.405462 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.405475 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.405501 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:27:00Z","lastTransitionTime":"2025-12-05T12:27:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.507637 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.507686 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.507696 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.507713 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.507723 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:27:00Z","lastTransitionTime":"2025-12-05T12:27:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.610100 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.610169 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.610181 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.610225 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.610235 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:27:00Z","lastTransitionTime":"2025-12-05T12:27:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.712930 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.712968 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.712976 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.712990 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.712999 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:27:00Z","lastTransitionTime":"2025-12-05T12:27:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.814659 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.814694 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.814703 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.814717 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.814725 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:27:00Z","lastTransitionTime":"2025-12-05T12:27:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.917488 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.917560 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.917571 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.917587 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.917596 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:27:00Z","lastTransitionTime":"2025-12-05T12:27:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.998351 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.998450 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:27:00 crc kubenswrapper[4784]: E1205 12:27:00.998644 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:27:00 crc kubenswrapper[4784]: E1205 12:27:00.998849 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543" Dec 05 12:27:00 crc kubenswrapper[4784]: I1205 12:27:00.998345 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:27:00 crc kubenswrapper[4784]: E1205 12:27:00.999162 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.011422 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a95e925e1cf25326e556267b9b43fcff4b12eafe8aeb03b577a01ed8f0d98ab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:27:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.019978 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.020021 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.020043 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.020066 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.020079 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:27:01Z","lastTransitionTime":"2025-12-05T12:27:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.022000 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xpw77" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"beba5d67-ad2e-4968-91da-3f451dd2cdc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52618f95fb8efa7ff3baa650dcf8f3373b7d17bc27138fa03e49c68792adf36c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjrpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xpw77\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:27:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.033123 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be412f31-7a36-4811-8914-be8cdc987d08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://76ddca8a6cc5af5e792bf0f50c2bc3cfcec5ea40705126799786373ad5d18e8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45f31312230c7d56492145f94ee5dfda841a79b7d35cf4139b4fc09eacf070fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qd2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sx8lm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:27:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.046966 4784 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47a0de2a-77d6-40f1-abe1-767b65e73b74\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1315abec8017870aa470f7fc9919d3ac9722ab2735
67e0e12baa8405ede002e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 12:25:38.416897 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 12:25:38.417058 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 12:25:38.422141 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1079216912/tls.crt::/tmp/serving-cert-1079216912/tls.key\\\\\\\"\\\\nI1205 12:25:38.771082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 12:25:38.774729 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 12:25:38.774756 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 12:25:38.774788 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 12:25:38.774811 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 12:25:38.780280 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 12:25:38.780316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780321 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 12:25:38.780326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 12:25:38.780329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 12:25:38.780332 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 12:25:38.780335 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 12:25:38.780478 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 12:25:38.783020 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:23Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:27:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.060526 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.073679 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf86c4db02b1091f887fc7dc73c7e2e7bb8d03e7212fb8bd443a43c4f51e976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87cb87114139d85aeacf41902c2c61f7ccb3cef1f22892820a521390c2a64d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:27:01Z is after 2025-08-24T17:21:41Z"
Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.085588 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:27:01Z is after 2025-08-24T17:21:41Z"
Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.100377 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0851219b-f3dd-4229-b5c5-b4d86a452bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f040ccf64d679f2035148f8391ebcf3fb0e262970a60355616a2d51c5114ddf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7t7rf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2386591cf338cbe93dd94038583c47b3f3e56762d52bf6e99de5eb8e08da02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7t7rf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pzsqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:27:01Z is after 2025-08-24T17:21:41Z"
Dec 05
12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.113181 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"344741d3-75c3-4c58-9367-805ccfe373d5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc621616fccd5c77c9bb38904aacd0e0cd398d7c4b541d7cbba8b8bf5c31b0ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c288b264f88025f09114ce9b652a0e2b968037c64d4235af9a3125685ace889\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://125aee8151f99b21dca57ec7d237806d4cf8101cf97216b38dda10d47e43bf10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:27:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.122527 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.122563 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.122575 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.122594 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.122606 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:27:01Z","lastTransitionTime":"2025-12-05T12:27:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.125004 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f51b46aa-f46b-4de1-bbbc-b23acb571394\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c3edce4b8b954f99066449c8dd5566695f300fc65c85ba3c90827ea380c87cf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6534b0ccbe3164a1bab663c746b201b5aef84ca859991c89cc3a7a84693f24a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a248634eb57b9ae01ffea846c57482ea060fcbd6281c5404bf96ab0706ee3f2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd6a4685a8454beb00ca78c095040c255785f695c1645ce1142a835f8d01fac9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd6a4685a8454beb00ca78c095040c255785f695c1645ce1142a835f8d01fac9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:27:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.134958 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4ccc799-05c4-447c-b61d-e475213628f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d17f8c60696435050e9a5da93f54ee6a49b87142b36eb925b68197d10b3a935d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://906b2cfdbacb9adc9b4cac9f3
f1845c45fbaf3a0871e50cf3725dc9bebd85631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://906b2cfdbacb9adc9b4cac9f3f1845c45fbaf3a0871e50cf3725dc9bebd85631\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:27:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.146694 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vpljs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58eb6bc9-be04-4bd0-a0a1-4021cfc2095b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b4aeb18e84f1b440528e547960f9aed0bf44e53a870af6b06bc85f754e1c04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa
75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a6d9abb95cd63edc8f956baa75e8ed6ae7797ce8590ae8f9e33e5030664395e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a6bd7826f6919bab1fabc84c0506c343e980221d573740b7a61eae906247ab6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0edabd2aea4f6fe536d9b2f4fab3682f312ee4b511bef62fcb825be28ca2bb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint
\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92e33edc24d217ec4d8da1928ff3605d5ff305f20cbf3f8d0291ae998dadca64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff1156f448f9cfe7ed5fa7db65a96385e584855d831821f7dfbc97a37c0d7cdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":fals
e,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://709b992455f436cd8015633afbf9bc34f6c11c58001d5a0448bd5a71d65bf86f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ntgll\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vpljs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:27:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.158187 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-g5gv5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759cb09f-42c3-4254-82f8-b5285b61012a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:26:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58740f457dc5aa8e984e28676df457a65c76e6be7ac9f64348a9d7a25246f8bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c44657f4f0a70fc2d2f1dcc08f1ca23c5fe94e77d3dbdd1bbb3f36f6471a62d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:26:30Z\\\",\\\"message\\\":\\\"2025-12-05T12:25:45+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_f67a9645-3a8a-41e7-b088-6802b0384b84\\\\n2025-12-05T12:25:45+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_f67a9645-3a8a-41e7-b088-6802b0384b84 to /host/opt/cni/bin/\\\\n2025-12-05T12:25:45Z [verbose] multus-daemon started\\\\n2025-12-05T12:25:45Z [verbose] Readiness Indicator file check\\\\n2025-12-05T12:26:30Z [error] have you checked that your default network is ready? 
still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:44Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:26:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rpk8f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-g5gv5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:27:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.174523 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"291f2a35-7dd5-4af9-87f0-caae4ef75c66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1b0328193847e770f1dc35e11b6fa43f0f7a15326972cda4b409ca656d30566\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1b0328193847e770f1dc35e11b6fa43f0f7a15326972cda4b409ca656d30566\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T12:26:42Z\\\",\\\"message\\\":\\\"s/network-check-source-55646444c4-trplf\\\\nI1205 12:26:41.841131 6823 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf in node crc\\\\nI1205 12:26:41.841166 6823 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1205 12:26:41.840956 6823 obj_retry.go:303] Retry object setup: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb\\\\nI1205 12:26:41.841211 6823 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb\\\\nI1205 12:26:41.841220 6823 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb in node crc\\\\nI1205 12:26:41.841226 6823 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb after 0 failed attempt(s)\\\\nI1205 12:26:41.841231 6823 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb\\\\nF1205 12:26:41.840784 6823 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T12:26:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-fxbpl_openshift-ovn-kubernetes(291f2a35-7dd5-4af9-87f0-caae4ef75c66)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gv9d9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fxbpl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:27:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.183540 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dzwxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f8364fb-1be8-4baa-aff0-10d4a4e8d614\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01f0d98ad3b8357dc9296635ef627f67300f4bbdd8131491552ba43830d1e658\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2xfq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dzwxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:27:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.192664 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ln9ct" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"531c2cfd-8b93-4ec4-88ab-fb4e40de2543\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmtkj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmtkj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T12:25:58Z\\\"}}\" for pod 
\"openshift-multus\"/\"network-metrics-daemon-ln9ct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:27:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.202628 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba1e93b1bb1f04a0c36b81190dc5ba99e09cff796f519b55340c78d2d6305a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T12:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:27:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.213211 4784 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T12:25:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:27:01Z is after 2025-08-24T17:21:41Z" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.225424 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.225508 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.225520 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.225535 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.225544 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:27:01Z","lastTransitionTime":"2025-12-05T12:27:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.327791 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.327836 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.327848 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.327866 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.327879 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:27:01Z","lastTransitionTime":"2025-12-05T12:27:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.430217 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.430268 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.430280 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.430296 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.430306 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:27:01Z","lastTransitionTime":"2025-12-05T12:27:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.532702 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.532737 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.532745 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.532757 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.532765 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:27:01Z","lastTransitionTime":"2025-12-05T12:27:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.635340 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.635580 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.635592 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.635608 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.635619 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:27:01Z","lastTransitionTime":"2025-12-05T12:27:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.737928 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.738226 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.738333 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.738457 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.738530 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:27:01Z","lastTransitionTime":"2025-12-05T12:27:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.840183 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.840827 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.840914 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.840983 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.841051 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:27:01Z","lastTransitionTime":"2025-12-05T12:27:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.943554 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.943928 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.944053 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.944231 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.944371 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:27:01Z","lastTransitionTime":"2025-12-05T12:27:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:27:01 crc kubenswrapper[4784]: I1205 12:27:01.998280 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:27:01 crc kubenswrapper[4784]: E1205 12:27:01.998579 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.046982 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.047313 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.047438 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.047525 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.047650 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:27:02Z","lastTransitionTime":"2025-12-05T12:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.150271 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.150509 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.150623 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.150702 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.150764 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:27:02Z","lastTransitionTime":"2025-12-05T12:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.253688 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.253736 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.253747 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.253766 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.253778 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:27:02Z","lastTransitionTime":"2025-12-05T12:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.356805 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.356853 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.356863 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.356883 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.356893 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:27:02Z","lastTransitionTime":"2025-12-05T12:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.459275 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.459673 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.459886 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.460056 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.460183 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:27:02Z","lastTransitionTime":"2025-12-05T12:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.562347 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.562695 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.562861 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.563037 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.563377 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:27:02Z","lastTransitionTime":"2025-12-05T12:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.639932 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/531c2cfd-8b93-4ec4-88ab-fb4e40de2543-metrics-certs\") pod \"network-metrics-daemon-ln9ct\" (UID: \"531c2cfd-8b93-4ec4-88ab-fb4e40de2543\") " pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:27:02 crc kubenswrapper[4784]: E1205 12:27:02.640084 4784 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 12:27:02 crc kubenswrapper[4784]: E1205 12:27:02.640148 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/531c2cfd-8b93-4ec4-88ab-fb4e40de2543-metrics-certs podName:531c2cfd-8b93-4ec4-88ab-fb4e40de2543 nodeName:}" failed. No retries permitted until 2025-12-05 12:28:06.64013078 +0000 UTC m=+166.060197595 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/531c2cfd-8b93-4ec4-88ab-fb4e40de2543-metrics-certs") pod "network-metrics-daemon-ln9ct" (UID: "531c2cfd-8b93-4ec4-88ab-fb4e40de2543") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.665457 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.665503 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.665511 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.665527 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.665537 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:27:02Z","lastTransitionTime":"2025-12-05T12:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.768488 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.768715 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.768777 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.768886 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.768946 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:27:02Z","lastTransitionTime":"2025-12-05T12:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.870926 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.871222 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.871293 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.871368 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.871431 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:27:02Z","lastTransitionTime":"2025-12-05T12:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.974180 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.974460 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.974546 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.974669 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.974754 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:27:02Z","lastTransitionTime":"2025-12-05T12:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.998790 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.998838 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:27:02 crc kubenswrapper[4784]: E1205 12:27:02.998976 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:27:02 crc kubenswrapper[4784]: I1205 12:27:02.999372 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:27:02 crc kubenswrapper[4784]: E1205 12:27:02.999540 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:27:02 crc kubenswrapper[4784]: E1205 12:27:02.999757 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543" Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.019290 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.086720 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.086777 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.086794 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.086819 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.086835 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:27:03Z","lastTransitionTime":"2025-12-05T12:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.188725 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.188777 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.188792 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.188812 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.188824 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:27:03Z","lastTransitionTime":"2025-12-05T12:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.291798 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.291866 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.291877 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.291893 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.291903 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:27:03Z","lastTransitionTime":"2025-12-05T12:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.395328 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.395390 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.395403 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.395422 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.395434 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:27:03Z","lastTransitionTime":"2025-12-05T12:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.499572 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.499614 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.499623 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.499641 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.499655 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:27:03Z","lastTransitionTime":"2025-12-05T12:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.602377 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.602428 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.602441 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.602458 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.602470 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:27:03Z","lastTransitionTime":"2025-12-05T12:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.704976 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.705029 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.705040 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.705057 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.705067 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:27:03Z","lastTransitionTime":"2025-12-05T12:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.807982 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.808044 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.808058 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.808122 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.808133 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:27:03Z","lastTransitionTime":"2025-12-05T12:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.911256 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.911300 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.911309 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.911327 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.911338 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:27:03Z","lastTransitionTime":"2025-12-05T12:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:27:03 crc kubenswrapper[4784]: I1205 12:27:03.998919 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:27:03 crc kubenswrapper[4784]: E1205 12:27:03.999104 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.014657 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.014698 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.014708 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.014724 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.014734 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:27:04Z","lastTransitionTime":"2025-12-05T12:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.110012 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.110089 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.110106 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.110131 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.110147 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:27:04Z","lastTransitionTime":"2025-12-05T12:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:27:04 crc kubenswrapper[4784]: E1205 12:27:04.125957 4784 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:27:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:27:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:27:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:27:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:27:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:27:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:27:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:27:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"08035136-431a-41d0-879c-bf86d5af7e54\\\",\\\"systemUUID\\\":\\\"aac4a951-c40f-4b5f-a660-6c137757957c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:27:04Z is after 2025-08-24T17:21:41Z" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.131022 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.131087 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.131103 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.131125 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.131139 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:27:04Z","lastTransitionTime":"2025-12-05T12:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:27:04 crc kubenswrapper[4784]: E1205 12:27:04.149143 4784 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:27:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:27:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:27:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:27:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:27:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:27:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T12:27:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T12:27:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"08035136-431a-41d0-879c-bf86d5af7e54\\\",\\\"systemUUID\\\":\\\"aac4a951-c40f-4b5f-a660-6c137757957c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T12:27:04Z is after 2025-08-24T17:21:41Z" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.153302 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.153348 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.153359 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.153374 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.153385 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:27:04Z","lastTransitionTime":"2025-12-05T12:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.181084 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.181125 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.181137 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.181152 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.181163 4784 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T12:27:04Z","lastTransitionTime":"2025-12-05T12:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.207175 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-zn8kg"] Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.210764 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zn8kg" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.213666 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.213733 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.213940 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.214087 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.239091 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=1.239071976 podStartE2EDuration="1.239071976s" podCreationTimestamp="2025-12-05 12:27:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:04.237508224 +0000 UTC m=+103.657575049" watchObservedRunningTime="2025-12-05 12:27:04.239071976 +0000 UTC m=+103.659138801" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.274076 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=86.27405716 podStartE2EDuration="1m26.27405716s" podCreationTimestamp="2025-12-05 12:25:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:04.260478422 +0000 UTC m=+103.680545247" watchObservedRunningTime="2025-12-05 12:27:04.27405716 +0000 UTC m=+103.694123975" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.306780 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-xpw77" podStartSLOduration=81.306760849 podStartE2EDuration="1m21.306760849s" podCreationTimestamp="2025-12-05 12:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:04.286908354 +0000 UTC m=+103.706975179" watchObservedRunningTime="2025-12-05 12:27:04.306760849 +0000 UTC m=+103.726827664" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.307001 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podStartSLOduration=81.306996227 podStartE2EDuration="1m21.306996227s" podCreationTimestamp="2025-12-05 12:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:04.305639382 +0000 UTC m=+103.725706217" watchObservedRunningTime="2025-12-05 12:27:04.306996227 +0000 UTC m=+103.727063032" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.321619 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=86.321603878 podStartE2EDuration="1m26.321603878s" podCreationTimestamp="2025-12-05 12:25:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 
00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:04.321556357 +0000 UTC m=+103.741623172" watchObservedRunningTime="2025-12-05 12:27:04.321603878 +0000 UTC m=+103.741670693" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.333015 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=48.332997495 podStartE2EDuration="48.332997495s" podCreationTimestamp="2025-12-05 12:26:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:04.332284352 +0000 UTC m=+103.752351167" watchObservedRunningTime="2025-12-05 12:27:04.332997495 +0000 UTC m=+103.753064310" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.358281 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/a5607de4-cb99-41f7-b9b6-81a167e1cd11-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-zn8kg\" (UID: \"a5607de4-cb99-41f7-b9b6-81a167e1cd11\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zn8kg" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.358608 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/a5607de4-cb99-41f7-b9b6-81a167e1cd11-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-zn8kg\" (UID: \"a5607de4-cb99-41f7-b9b6-81a167e1cd11\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zn8kg" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.358720 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a5607de4-cb99-41f7-b9b6-81a167e1cd11-service-ca\") pod \"cluster-version-operator-5c965bbfc6-zn8kg\" (UID: \"a5607de4-cb99-41f7-b9b6-81a167e1cd11\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zn8kg" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.358826 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a5607de4-cb99-41f7-b9b6-81a167e1cd11-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-zn8kg\" (UID: \"a5607de4-cb99-41f7-b9b6-81a167e1cd11\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zn8kg" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.358951 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a5607de4-cb99-41f7-b9b6-81a167e1cd11-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-zn8kg\" (UID: \"a5607de4-cb99-41f7-b9b6-81a167e1cd11\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zn8kg" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.382940 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pzsqb" podStartSLOduration=80.382923302 podStartE2EDuration="1m20.382923302s" podCreationTimestamp="2025-12-05 12:25:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:04.3795303 +0000 UTC m=+103.799597115" 
watchObservedRunningTime="2025-12-05 12:27:04.382923302 +0000 UTC m=+103.802990107" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.404515 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=39.404493234 podStartE2EDuration="39.404493234s" podCreationTimestamp="2025-12-05 12:26:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:04.391061081 +0000 UTC m=+103.811127906" watchObservedRunningTime="2025-12-05 12:27:04.404493234 +0000 UTC m=+103.824560049" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.404890 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-vpljs" podStartSLOduration=81.404884447 podStartE2EDuration="1m21.404884447s" podCreationTimestamp="2025-12-05 12:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:04.404608258 +0000 UTC m=+103.824675093" watchObservedRunningTime="2025-12-05 12:27:04.404884447 +0000 UTC m=+103.824951252" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.447090 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-g5gv5" podStartSLOduration=81.447071749 podStartE2EDuration="1m21.447071749s" podCreationTimestamp="2025-12-05 12:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:04.446827811 +0000 UTC m=+103.866894636" watchObservedRunningTime="2025-12-05 12:27:04.447071749 +0000 UTC m=+103.867138554" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.460204 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a5607de4-cb99-41f7-b9b6-81a167e1cd11-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-zn8kg\" (UID: \"a5607de4-cb99-41f7-b9b6-81a167e1cd11\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zn8kg" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.460247 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/a5607de4-cb99-41f7-b9b6-81a167e1cd11-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-zn8kg\" (UID: \"a5607de4-cb99-41f7-b9b6-81a167e1cd11\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zn8kg" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.460269 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/a5607de4-cb99-41f7-b9b6-81a167e1cd11-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-zn8kg\" (UID: \"a5607de4-cb99-41f7-b9b6-81a167e1cd11\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zn8kg" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.460286 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a5607de4-cb99-41f7-b9b6-81a167e1cd11-service-ca\") pod \"cluster-version-operator-5c965bbfc6-zn8kg\" (UID: \"a5607de4-cb99-41f7-b9b6-81a167e1cd11\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zn8kg" Dec 05 
12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.460301 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a5607de4-cb99-41f7-b9b6-81a167e1cd11-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-zn8kg\" (UID: \"a5607de4-cb99-41f7-b9b6-81a167e1cd11\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zn8kg" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.460443 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/a5607de4-cb99-41f7-b9b6-81a167e1cd11-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-zn8kg\" (UID: \"a5607de4-cb99-41f7-b9b6-81a167e1cd11\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zn8kg" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.460466 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/a5607de4-cb99-41f7-b9b6-81a167e1cd11-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-zn8kg\" (UID: \"a5607de4-cb99-41f7-b9b6-81a167e1cd11\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zn8kg" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.461288 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a5607de4-cb99-41f7-b9b6-81a167e1cd11-service-ca\") pod \"cluster-version-operator-5c965bbfc6-zn8kg\" (UID: \"a5607de4-cb99-41f7-b9b6-81a167e1cd11\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zn8kg" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.466851 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a5607de4-cb99-41f7-b9b6-81a167e1cd11-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-zn8kg\" (UID: \"a5607de4-cb99-41f7-b9b6-81a167e1cd11\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zn8kg" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.478165 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a5607de4-cb99-41f7-b9b6-81a167e1cd11-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-zn8kg\" (UID: \"a5607de4-cb99-41f7-b9b6-81a167e1cd11\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zn8kg" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.504856 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-dzwxp" podStartSLOduration=81.504835785 podStartE2EDuration="1m21.504835785s" podCreationTimestamp="2025-12-05 12:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:04.4928798 +0000 UTC m=+103.912946625" watchObservedRunningTime="2025-12-05 12:27:04.504835785 +0000 UTC m=+103.924902600" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.524003 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zn8kg" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.589498 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zn8kg" event={"ID":"a5607de4-cb99-41f7-b9b6-81a167e1cd11","Type":"ContainerStarted","Data":"2e9774374d96931fd74b72d8e5315160286ffd793d838111095ee767491fa541"} Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.998519 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:27:04 crc kubenswrapper[4784]: E1205 12:27:04.999348 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.998557 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:27:04 crc kubenswrapper[4784]: E1205 12:27:04.999453 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543" Dec 05 12:27:04 crc kubenswrapper[4784]: I1205 12:27:04.998519 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:27:04 crc kubenswrapper[4784]: E1205 12:27:04.999514 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:27:05 crc kubenswrapper[4784]: I1205 12:27:05.593366 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zn8kg" event={"ID":"a5607de4-cb99-41f7-b9b6-81a167e1cd11","Type":"ContainerStarted","Data":"e606a1bda5c2ba9721d66fe36025c6169fc1ae0c5560a7fa674e2a1c2bee4a84"} Dec 05 12:27:05 crc kubenswrapper[4784]: I1205 12:27:05.607350 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zn8kg" podStartSLOduration=82.60733233 podStartE2EDuration="1m22.60733233s" podCreationTimestamp="2025-12-05 12:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:05.606555524 +0000 UTC m=+105.026622339" watchObservedRunningTime="2025-12-05 12:27:05.60733233 +0000 UTC m=+105.027399145" Dec 05 12:27:05 crc kubenswrapper[4784]: I1205 12:27:05.998237 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:27:05 crc kubenswrapper[4784]: E1205 12:27:05.998579 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:27:06 crc kubenswrapper[4784]: I1205 12:27:06.998672 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:27:06 crc kubenswrapper[4784]: I1205 12:27:06.998711 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:27:06 crc kubenswrapper[4784]: I1205 12:27:06.998672 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:27:06 crc kubenswrapper[4784]: E1205 12:27:06.998820 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543" Dec 05 12:27:06 crc kubenswrapper[4784]: E1205 12:27:06.998886 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:27:06 crc kubenswrapper[4784]: E1205 12:27:06.998952 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:27:07 crc kubenswrapper[4784]: I1205 12:27:07.998718 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:27:07 crc kubenswrapper[4784]: E1205 12:27:07.998980 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:27:08 crc kubenswrapper[4784]: I1205 12:27:08.998269 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:27:08 crc kubenswrapper[4784]: I1205 12:27:08.998309 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:27:08 crc kubenswrapper[4784]: E1205 12:27:08.998501 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:27:08 crc kubenswrapper[4784]: I1205 12:27:08.998523 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:27:08 crc kubenswrapper[4784]: E1205 12:27:08.999067 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543" Dec 05 12:27:08 crc kubenswrapper[4784]: E1205 12:27:08.999141 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:27:08 crc kubenswrapper[4784]: I1205 12:27:08.999538 4784 scope.go:117] "RemoveContainer" containerID="e1b0328193847e770f1dc35e11b6fa43f0f7a15326972cda4b409ca656d30566" Dec 05 12:27:08 crc kubenswrapper[4784]: E1205 12:27:08.999757 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-fxbpl_openshift-ovn-kubernetes(291f2a35-7dd5-4af9-87f0-caae4ef75c66)\"" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" Dec 05 12:27:09 crc kubenswrapper[4784]: I1205 12:27:09.998056 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:27:09 crc kubenswrapper[4784]: E1205 12:27:09.998230 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:27:10 crc kubenswrapper[4784]: I1205 12:27:10.997988 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:27:10 crc kubenswrapper[4784]: I1205 12:27:10.998103 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:27:10 crc kubenswrapper[4784]: I1205 12:27:10.999862 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:27:11 crc kubenswrapper[4784]: E1205 12:27:11.000030 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543" Dec 05 12:27:11 crc kubenswrapper[4784]: E1205 12:27:11.000029 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:27:11 crc kubenswrapper[4784]: E1205 12:27:11.000474 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:27:11 crc kubenswrapper[4784]: I1205 12:27:11.998070 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:27:11 crc kubenswrapper[4784]: E1205 12:27:11.998295 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:27:12 crc kubenswrapper[4784]: I1205 12:27:12.998713 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:27:12 crc kubenswrapper[4784]: I1205 12:27:12.998748 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:27:12 crc kubenswrapper[4784]: I1205 12:27:12.998908 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:27:12 crc kubenswrapper[4784]: E1205 12:27:12.999008 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543" Dec 05 12:27:12 crc kubenswrapper[4784]: E1205 12:27:12.999101 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:27:12 crc kubenswrapper[4784]: E1205 12:27:12.999140 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:27:13 crc kubenswrapper[4784]: I1205 12:27:13.998384 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:27:13 crc kubenswrapper[4784]: E1205 12:27:13.998797 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:27:14 crc kubenswrapper[4784]: I1205 12:27:14.998781 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:27:14 crc kubenswrapper[4784]: I1205 12:27:14.998764 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:27:14 crc kubenswrapper[4784]: I1205 12:27:14.998815 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:27:14 crc kubenswrapper[4784]: E1205 12:27:14.998947 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:27:14 crc kubenswrapper[4784]: E1205 12:27:14.999290 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:27:14 crc kubenswrapper[4784]: E1205 12:27:14.999487 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543" Dec 05 12:27:15 crc kubenswrapper[4784]: I1205 12:27:15.998098 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:27:15 crc kubenswrapper[4784]: E1205 12:27:15.998355 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:27:16 crc kubenswrapper[4784]: I1205 12:27:16.999086 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:27:16 crc kubenswrapper[4784]: E1205 12:27:16.999303 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:27:16 crc kubenswrapper[4784]: I1205 12:27:16.999283 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:27:16 crc kubenswrapper[4784]: E1205 12:27:16.999936 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543" Dec 05 12:27:17 crc kubenswrapper[4784]: I1205 12:27:17.000414 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:27:17 crc kubenswrapper[4784]: E1205 12:27:17.000577 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:27:17 crc kubenswrapper[4784]: I1205 12:27:17.628476 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-g5gv5_759cb09f-42c3-4254-82f8-b5285b61012a/kube-multus/1.log" Dec 05 12:27:17 crc kubenswrapper[4784]: I1205 12:27:17.628919 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-g5gv5_759cb09f-42c3-4254-82f8-b5285b61012a/kube-multus/0.log" Dec 05 12:27:17 crc kubenswrapper[4784]: I1205 12:27:17.628962 4784 generic.go:334] "Generic (PLEG): container finished" podID="759cb09f-42c3-4254-82f8-b5285b61012a" containerID="58740f457dc5aa8e984e28676df457a65c76e6be7ac9f64348a9d7a25246f8bd" exitCode=1 Dec 05 12:27:17 crc kubenswrapper[4784]: I1205 12:27:17.628995 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-g5gv5" event={"ID":"759cb09f-42c3-4254-82f8-b5285b61012a","Type":"ContainerDied","Data":"58740f457dc5aa8e984e28676df457a65c76e6be7ac9f64348a9d7a25246f8bd"} Dec 05 12:27:17 crc kubenswrapper[4784]: I1205 12:27:17.629031 4784 scope.go:117] "RemoveContainer" containerID="8c44657f4f0a70fc2d2f1dcc08f1ca23c5fe94e77d3dbdd1bbb3f36f6471a62d" Dec 05 12:27:17 crc kubenswrapper[4784]: I1205 12:27:17.629651 4784 scope.go:117] "RemoveContainer" containerID="58740f457dc5aa8e984e28676df457a65c76e6be7ac9f64348a9d7a25246f8bd" Dec 05 12:27:17 crc kubenswrapper[4784]: E1205 12:27:17.630015 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-g5gv5_openshift-multus(759cb09f-42c3-4254-82f8-b5285b61012a)\"" pod="openshift-multus/multus-g5gv5" podUID="759cb09f-42c3-4254-82f8-b5285b61012a" Dec 05 12:27:17 crc kubenswrapper[4784]: I1205 12:27:17.998379 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:27:17 crc kubenswrapper[4784]: E1205 12:27:17.998894 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:27:18 crc kubenswrapper[4784]: I1205 12:27:18.635379 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-g5gv5_759cb09f-42c3-4254-82f8-b5285b61012a/kube-multus/1.log" Dec 05 12:27:18 crc kubenswrapper[4784]: I1205 12:27:18.998319 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:27:18 crc kubenswrapper[4784]: I1205 12:27:18.998428 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:27:18 crc kubenswrapper[4784]: E1205 12:27:18.998526 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:27:18 crc kubenswrapper[4784]: E1205 12:27:18.998659 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543" Dec 05 12:27:18 crc kubenswrapper[4784]: I1205 12:27:18.998680 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:27:18 crc kubenswrapper[4784]: E1205 12:27:18.998783 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:27:19 crc kubenswrapper[4784]: I1205 12:27:19.998418 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:27:19 crc kubenswrapper[4784]: E1205 12:27:19.998559 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:27:21 crc kubenswrapper[4784]: I1205 12:27:21.017626 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:27:21 crc kubenswrapper[4784]: I1205 12:27:21.017626 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:27:21 crc kubenswrapper[4784]: I1205 12:27:21.017735 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:27:21 crc kubenswrapper[4784]: E1205 12:27:21.019057 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:27:21 crc kubenswrapper[4784]: E1205 12:27:21.019092 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:27:21 crc kubenswrapper[4784]: E1205 12:27:21.019169 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543" Dec 05 12:27:21 crc kubenswrapper[4784]: E1205 12:27:21.049164 4784 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Dec 05 12:27:21 crc kubenswrapper[4784]: E1205 12:27:21.082773 4784 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 12:27:21 crc kubenswrapper[4784]: I1205 12:27:21.998494 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:27:21 crc kubenswrapper[4784]: E1205 12:27:21.998645 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:27:22 crc kubenswrapper[4784]: I1205 12:27:22.998401 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:27:22 crc kubenswrapper[4784]: I1205 12:27:22.998477 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:27:22 crc kubenswrapper[4784]: I1205 12:27:22.998576 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:27:22 crc kubenswrapper[4784]: E1205 12:27:22.999593 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:27:22 crc kubenswrapper[4784]: E1205 12:27:22.999664 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:27:22 crc kubenswrapper[4784]: E1205 12:27:22.999421 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543" Dec 05 12:27:23 crc kubenswrapper[4784]: I1205 12:27:23.997779 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:27:23 crc kubenswrapper[4784]: E1205 12:27:23.998051 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:27:23 crc kubenswrapper[4784]: I1205 12:27:23.998726 4784 scope.go:117] "RemoveContainer" containerID="e1b0328193847e770f1dc35e11b6fa43f0f7a15326972cda4b409ca656d30566" Dec 05 12:27:24 crc kubenswrapper[4784]: I1205 12:27:24.657056 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fxbpl_291f2a35-7dd5-4af9-87f0-caae4ef75c66/ovnkube-controller/3.log" Dec 05 12:27:24 crc kubenswrapper[4784]: I1205 12:27:24.660841 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" event={"ID":"291f2a35-7dd5-4af9-87f0-caae4ef75c66","Type":"ContainerStarted","Data":"e727e38f6d5f43540b18e918bc7541698e176628300db91c6c66b395bc5e5ad4"} Dec 05 12:27:24 crc kubenswrapper[4784]: I1205 12:27:24.661351 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:27:24 crc kubenswrapper[4784]: I1205 12:27:24.694229 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" podStartSLOduration=101.694208588 podStartE2EDuration="1m41.694208588s" podCreationTimestamp="2025-12-05 12:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:24.691600532 +0000 UTC m=+124.111667347" watchObservedRunningTime="2025-12-05 12:27:24.694208588 +0000 UTC m=+124.114275403" Dec 05 12:27:24 crc kubenswrapper[4784]: I1205 12:27:24.819691 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-ln9ct"] Dec 05 12:27:24 crc kubenswrapper[4784]: I1205 12:27:24.819827 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:27:24 crc kubenswrapper[4784]: E1205 12:27:24.819929 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543" Dec 05 12:27:24 crc kubenswrapper[4784]: I1205 12:27:24.997928 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:27:24 crc kubenswrapper[4784]: E1205 12:27:24.998107 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:27:24 crc kubenswrapper[4784]: I1205 12:27:24.997925 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:27:24 crc kubenswrapper[4784]: E1205 12:27:24.998367 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:27:25 crc kubenswrapper[4784]: I1205 12:27:25.997893 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:27:25 crc kubenswrapper[4784]: E1205 12:27:25.998044 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:27:26 crc kubenswrapper[4784]: E1205 12:27:26.083573 4784 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 12:27:26 crc kubenswrapper[4784]: I1205 12:27:26.998909 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:27:26 crc kubenswrapper[4784]: I1205 12:27:26.998919 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:27:26 crc kubenswrapper[4784]: I1205 12:27:26.998958 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:27:26 crc kubenswrapper[4784]: E1205 12:27:26.999645 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:27:26 crc kubenswrapper[4784]: E1205 12:27:26.999733 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:27:26 crc kubenswrapper[4784]: E1205 12:27:26.999455 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543" Dec 05 12:27:27 crc kubenswrapper[4784]: I1205 12:27:27.997990 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:27:27 crc kubenswrapper[4784]: E1205 12:27:27.998163 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:27:28 crc kubenswrapper[4784]: I1205 12:27:28.998504 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:27:28 crc kubenswrapper[4784]: E1205 12:27:28.998656 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:27:28 crc kubenswrapper[4784]: I1205 12:27:28.998895 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:27:28 crc kubenswrapper[4784]: E1205 12:27:28.998952 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:27:28 crc kubenswrapper[4784]: I1205 12:27:28.999178 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:27:28 crc kubenswrapper[4784]: E1205 12:27:28.999271 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543" Dec 05 12:27:29 crc kubenswrapper[4784]: I1205 12:27:29.998358 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:27:29 crc kubenswrapper[4784]: E1205 12:27:29.998527 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:27:30 crc kubenswrapper[4784]: I1205 12:27:30.998801 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:27:31 crc kubenswrapper[4784]: E1205 12:27:31.000845 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:27:31 crc kubenswrapper[4784]: I1205 12:27:31.000873 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:27:31 crc kubenswrapper[4784]: I1205 12:27:31.000953 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:27:31 crc kubenswrapper[4784]: E1205 12:27:31.001223 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543" Dec 05 12:27:31 crc kubenswrapper[4784]: E1205 12:27:31.001342 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:27:31 crc kubenswrapper[4784]: I1205 12:27:31.002352 4784 scope.go:117] "RemoveContainer" containerID="58740f457dc5aa8e984e28676df457a65c76e6be7ac9f64348a9d7a25246f8bd" Dec 05 12:27:31 crc kubenswrapper[4784]: E1205 12:27:31.084050 4784 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 12:27:31 crc kubenswrapper[4784]: I1205 12:27:31.684209 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-g5gv5_759cb09f-42c3-4254-82f8-b5285b61012a/kube-multus/1.log" Dec 05 12:27:31 crc kubenswrapper[4784]: I1205 12:27:31.684264 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-g5gv5" event={"ID":"759cb09f-42c3-4254-82f8-b5285b61012a","Type":"ContainerStarted","Data":"edb0209f26abd1d599a14ecc0e1ca91e488e5f887ebf0f77ab6a5df65eb5dde4"} Dec 05 12:27:31 crc kubenswrapper[4784]: I1205 12:27:31.998359 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:27:31 crc kubenswrapper[4784]: E1205 12:27:31.998478 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:27:32 crc kubenswrapper[4784]: I1205 12:27:32.998916 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:27:32 crc kubenswrapper[4784]: I1205 12:27:32.998922 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:27:32 crc kubenswrapper[4784]: I1205 12:27:32.999212 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:27:33 crc kubenswrapper[4784]: E1205 12:27:32.999389 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543" Dec 05 12:27:33 crc kubenswrapper[4784]: E1205 12:27:32.999605 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:27:33 crc kubenswrapper[4784]: E1205 12:27:32.999727 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:27:33 crc kubenswrapper[4784]: I1205 12:27:33.998230 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:27:33 crc kubenswrapper[4784]: E1205 12:27:33.998972 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:27:34 crc kubenswrapper[4784]: I1205 12:27:34.997869 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:27:34 crc kubenswrapper[4784]: I1205 12:27:34.997905 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:27:34 crc kubenswrapper[4784]: E1205 12:27:34.997998 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 12:27:34 crc kubenswrapper[4784]: I1205 12:27:34.997869 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:27:34 crc kubenswrapper[4784]: E1205 12:27:34.998088 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ln9ct" podUID="531c2cfd-8b93-4ec4-88ab-fb4e40de2543" Dec 05 12:27:34 crc kubenswrapper[4784]: E1205 12:27:34.998156 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 12:27:35 crc kubenswrapper[4784]: I1205 12:27:35.998898 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:27:36 crc kubenswrapper[4784]: E1205 12:27:35.999128 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 12:27:36 crc kubenswrapper[4784]: I1205 12:27:36.998756 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:27:36 crc kubenswrapper[4784]: I1205 12:27:36.998985 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:27:37 crc kubenswrapper[4784]: I1205 12:27:36.998779 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:27:37 crc kubenswrapper[4784]: I1205 12:27:37.004178 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 05 12:27:37 crc kubenswrapper[4784]: I1205 12:27:37.004364 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 05 12:27:37 crc kubenswrapper[4784]: I1205 12:27:37.004917 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 05 12:27:37 crc kubenswrapper[4784]: I1205 12:27:37.005075 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 05 12:27:37 crc kubenswrapper[4784]: I1205 12:27:37.005112 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 05 12:27:37 crc kubenswrapper[4784]: I1205 12:27:37.005211 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 05 12:27:37 crc kubenswrapper[4784]: I1205 12:27:37.998839 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.792955 4784 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.831906 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-g67wl"] Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.832465 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-b2lpc"] Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.832748 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b2lpc" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.833157 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-g67wl" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.838920 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.839029 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.839060 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.839329 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.839463 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.840951 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.841038 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.841168 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.841323 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.842921 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.844013 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-89565"] Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.844556 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-89565" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.845144 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.849855 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.850262 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.850437 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.851169 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.851380 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.851529 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.851741 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.851910 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.852044 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-2drbk"] Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.852160 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.852449 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.852647 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.852779 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-2drbk" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.852823 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.852966 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.853480 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rmq4p"] Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.854342 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rmq4p" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.856510 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-k7w2s"] Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.857019 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-k7w2s" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.860366 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f5pj2"] Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.879414 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-tcnkb"] Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.881873 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-kld95"] Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.889303 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.889472 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f5pj2" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.898764 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-tcnkb" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.898900 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.900107 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.900301 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.900424 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.900560 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.900694 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.900812 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.900877 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.900921 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.900917 4784 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.901089 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.901227 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.901429 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.903578 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.905863 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m2c4c"] Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.906393 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m2c4c" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.906817 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.907711 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-kjvqd"] Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.908456 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.908614 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.908742 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.908957 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.909092 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.909668 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.909818 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.909868 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-kjvqd" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.909954 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.910077 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.910291 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.910648 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.911076 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.911320 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.911390 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.911608 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.911737 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.911815 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.911864 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.912640 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.914151 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.914355 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.921582 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-2mmsl"] Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.922373 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-n8hdp"] Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.922819 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-n8hdp" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.922820 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-m7m44"] Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.923207 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2mmsl" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.923736 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-m7m44" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.927590 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.932181 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.932493 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.937359 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.939738 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.939817 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.939859 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.940034 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.940609 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.941284 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.941409 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.941507 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.941611 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.941758 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.941837 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.942067 4784 reflector.go:368] Caches populated for *v1.ConfigMap 
from object-"openshift-image-registry"/"trusted-ca" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.942133 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.942728 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.943490 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.946406 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.948948 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.990340 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.991132 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.991371 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.991530 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.991663 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.991747 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.991858 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.992344 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.992857 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.993508 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-zjhhs"] Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.993641 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.993984 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-rmjws"] Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.994273 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-fvbbf"] Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.994588 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-fvbbf" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.995382 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-rmjws" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.996103 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.996559 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-g67wl"] Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.996699 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:44 crc kubenswrapper[4784]: I1205 12:27:44.998109 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.000241 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-c9str"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.001106 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7pzsb"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.001294 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-c9str" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.002205 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7pzsb" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.002919 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.003111 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.003371 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.003534 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.003654 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.004791 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.006816 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.006948 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.007210 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 05 12:27:45 crc 
kubenswrapper[4784]: I1205 12:27:45.007343 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.009462 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.015818 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.018512 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zd7j6\" (UniqueName: \"kubernetes.io/projected/260e147b-2517-481c-93f4-3335794f5a1e-kube-api-access-zd7j6\") pod \"authentication-operator-69f744f599-tcnkb\" (UID: \"260e147b-2517-481c-93f4-3335794f5a1e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tcnkb" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.018555 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-kld95\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.018584 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/260e147b-2517-481c-93f4-3335794f5a1e-service-ca-bundle\") pod \"authentication-operator-69f744f599-tcnkb\" (UID: \"260e147b-2517-481c-93f4-3335794f5a1e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tcnkb" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.018614 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/1b35d831-6af3-41e4-a111-ebfb9fefb029-images\") pod \"machine-api-operator-5694c8668f-kjvqd\" (UID: \"1b35d831-6af3-41e4-a111-ebfb9fefb029\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-kjvqd" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.018641 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z2qqf\" (UniqueName: \"kubernetes.io/projected/fe31ef3a-357d-4bbc-89e8-99a1f6ec12f5-kube-api-access-z2qqf\") pod \"cluster-image-registry-operator-dc59b4c8b-m2c4c\" (UID: \"fe31ef3a-357d-4bbc-89e8-99a1f6ec12f5\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m2c4c" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.018674 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b192643d-cbd3-4289-8152-7a5e038f1a7e-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-f5pj2\" (UID: \"b192643d-cbd3-4289-8152-7a5e038f1a7e\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f5pj2" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.018695 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w2895\" (UniqueName: \"kubernetes.io/projected/30b70430-7471-4b38-a1a2-22d557f5e1ca-kube-api-access-w2895\") pod 
\"downloads-7954f5f757-k7w2s\" (UID: \"30b70430-7471-4b38-a1a2-22d557f5e1ca\") " pod="openshift-console/downloads-7954f5f757-k7w2s" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.018717 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/260e147b-2517-481c-93f4-3335794f5a1e-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-tcnkb\" (UID: \"260e147b-2517-481c-93f4-3335794f5a1e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tcnkb" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.018740 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/260e147b-2517-481c-93f4-3335794f5a1e-serving-cert\") pod \"authentication-operator-69f744f599-tcnkb\" (UID: \"260e147b-2517-481c-93f4-3335794f5a1e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tcnkb" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.018763 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b922af1e-a218-4fc7-8e32-cb3fc03d02e3-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-89565\" (UID: \"b922af1e-a218-4fc7-8e32-cb3fc03d02e3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-89565" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.018785 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-kld95\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.018805 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/96b39f3e-b508-4f02-ae7a-d391eeca4988-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-2drbk\" (UID: \"96b39f3e-b508-4f02-ae7a-d391eeca4988\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2drbk" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.018820 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1eb80025-ed6a-4509-99ca-57f7a4c9eefb-config\") pod \"route-controller-manager-6576b87f9c-b2lpc\" (UID: \"1eb80025-ed6a-4509-99ca-57f7a4c9eefb\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b2lpc" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.018834 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/96b39f3e-b508-4f02-ae7a-d391eeca4988-client-ca\") pod \"controller-manager-879f6c89f-2drbk\" (UID: \"96b39f3e-b508-4f02-ae7a-d391eeca4988\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2drbk" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.018885 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/da405e0d-550f-42a2-8b4e-a387eabb8e0a-audit-dir\") pod 
\"oauth-openshift-558db77b4-kld95\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.018977 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/96b39f3e-b508-4f02-ae7a-d391eeca4988-serving-cert\") pod \"controller-manager-879f6c89f-2drbk\" (UID: \"96b39f3e-b508-4f02-ae7a-d391eeca4988\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2drbk" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.019000 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/4c735838-6f43-4f60-9adb-4dcabc8f05c2-machine-approver-tls\") pod \"machine-approver-56656f9798-2mmsl\" (UID: \"4c735838-6f43-4f60-9adb-4dcabc8f05c2\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2mmsl" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.019053 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-kld95\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.019131 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b63dfa9f-c0b4-41d2-9a30-10388f0ad077-etcd-serving-ca\") pod \"apiserver-76f77b778f-g67wl\" (UID: \"b63dfa9f-c0b4-41d2-9a30-10388f0ad077\") " pod="openshift-apiserver/apiserver-76f77b778f-g67wl" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.019210 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-kld95\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.019252 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b922af1e-a218-4fc7-8e32-cb3fc03d02e3-audit-dir\") pod \"apiserver-7bbb656c7d-89565\" (UID: \"b922af1e-a218-4fc7-8e32-cb3fc03d02e3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-89565" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.019284 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k79jm\" (UniqueName: \"kubernetes.io/projected/da405e0d-550f-42a2-8b4e-a387eabb8e0a-kube-api-access-k79jm\") pod \"oauth-openshift-558db77b4-kld95\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.019338 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: 
\"kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-kld95\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.019367 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b63dfa9f-c0b4-41d2-9a30-10388f0ad077-audit-dir\") pod \"apiserver-76f77b778f-g67wl\" (UID: \"b63dfa9f-c0b4-41d2-9a30-10388f0ad077\") " pod="openshift-apiserver/apiserver-76f77b778f-g67wl" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.019406 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fe31ef3a-357d-4bbc-89e8-99a1f6ec12f5-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-m2c4c\" (UID: \"fe31ef3a-357d-4bbc-89e8-99a1f6ec12f5\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m2c4c" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.019435 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-kld95\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.019456 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-kld95\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.019472 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/b63dfa9f-c0b4-41d2-9a30-10388f0ad077-image-import-ca\") pod \"apiserver-76f77b778f-g67wl\" (UID: \"b63dfa9f-c0b4-41d2-9a30-10388f0ad077\") " pod="openshift-apiserver/apiserver-76f77b778f-g67wl" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.019493 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9hwm\" (UniqueName: \"kubernetes.io/projected/b192643d-cbd3-4289-8152-7a5e038f1a7e-kube-api-access-x9hwm\") pod \"openshift-apiserver-operator-796bbdcf4f-f5pj2\" (UID: \"b192643d-cbd3-4289-8152-7a5e038f1a7e\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f5pj2" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.019510 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/96b39f3e-b508-4f02-ae7a-d391eeca4988-config\") pod \"controller-manager-879f6c89f-2drbk\" (UID: \"96b39f3e-b508-4f02-ae7a-d391eeca4988\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2drbk" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.019530 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: 
\"kubernetes.io/secret/b922af1e-a218-4fc7-8e32-cb3fc03d02e3-encryption-config\") pod \"apiserver-7bbb656c7d-89565\" (UID: \"b922af1e-a218-4fc7-8e32-cb3fc03d02e3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-89565" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.019755 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1b35d831-6af3-41e4-a111-ebfb9fefb029-config\") pod \"machine-api-operator-5694c8668f-kjvqd\" (UID: \"1b35d831-6af3-41e4-a111-ebfb9fefb029\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-kjvqd" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.019786 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4c735838-6f43-4f60-9adb-4dcabc8f05c2-auth-proxy-config\") pod \"machine-approver-56656f9798-2mmsl\" (UID: \"4c735838-6f43-4f60-9adb-4dcabc8f05c2\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2mmsl" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.019837 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2fn9z\" (UniqueName: \"kubernetes.io/projected/b63dfa9f-c0b4-41d2-9a30-10388f0ad077-kube-api-access-2fn9z\") pod \"apiserver-76f77b778f-g67wl\" (UID: \"b63dfa9f-c0b4-41d2-9a30-10388f0ad077\") " pod="openshift-apiserver/apiserver-76f77b778f-g67wl" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.019854 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1eb80025-ed6a-4509-99ca-57f7a4c9eefb-client-ca\") pod \"route-controller-manager-6576b87f9c-b2lpc\" (UID: \"1eb80025-ed6a-4509-99ca-57f7a4c9eefb\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b2lpc" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.019907 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b192643d-cbd3-4289-8152-7a5e038f1a7e-config\") pod \"openshift-apiserver-operator-796bbdcf4f-f5pj2\" (UID: \"b192643d-cbd3-4289-8152-7a5e038f1a7e\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f5pj2" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.019937 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/b63dfa9f-c0b4-41d2-9a30-10388f0ad077-audit\") pod \"apiserver-76f77b778f-g67wl\" (UID: \"b63dfa9f-c0b4-41d2-9a30-10388f0ad077\") " pod="openshift-apiserver/apiserver-76f77b778f-g67wl" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.019988 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-kld95\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.020005 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/1b35d831-6af3-41e4-a111-ebfb9fefb029-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-kjvqd\" (UID: \"1b35d831-6af3-41e4-a111-ebfb9fefb029\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-kjvqd" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.020020 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1eb80025-ed6a-4509-99ca-57f7a4c9eefb-serving-cert\") pod \"route-controller-manager-6576b87f9c-b2lpc\" (UID: \"1eb80025-ed6a-4509-99ca-57f7a4c9eefb\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b2lpc" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.020035 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b922af1e-a218-4fc7-8e32-cb3fc03d02e3-audit-policies\") pod \"apiserver-7bbb656c7d-89565\" (UID: \"b922af1e-a218-4fc7-8e32-cb3fc03d02e3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-89565" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.020051 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b63dfa9f-c0b4-41d2-9a30-10388f0ad077-serving-cert\") pod \"apiserver-76f77b778f-g67wl\" (UID: \"b63dfa9f-c0b4-41d2-9a30-10388f0ad077\") " pod="openshift-apiserver/apiserver-76f77b778f-g67wl" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.020073 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b63dfa9f-c0b4-41d2-9a30-10388f0ad077-trusted-ca-bundle\") pod \"apiserver-76f77b778f-g67wl\" (UID: \"b63dfa9f-c0b4-41d2-9a30-10388f0ad077\") " pod="openshift-apiserver/apiserver-76f77b778f-g67wl" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.020094 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b922af1e-a218-4fc7-8e32-cb3fc03d02e3-serving-cert\") pod \"apiserver-7bbb656c7d-89565\" (UID: \"b922af1e-a218-4fc7-8e32-cb3fc03d02e3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-89565" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.020109 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tvphx\" (UniqueName: \"kubernetes.io/projected/96b39f3e-b508-4f02-ae7a-d391eeca4988-kube-api-access-tvphx\") pod \"controller-manager-879f6c89f-2drbk\" (UID: \"96b39f3e-b508-4f02-ae7a-d391eeca4988\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2drbk" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.020131 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-kld95\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.020154 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/fe31ef3a-357d-4bbc-89e8-99a1f6ec12f5-bound-sa-token\") 
pod \"cluster-image-registry-operator-dc59b4c8b-m2c4c\" (UID: \"fe31ef3a-357d-4bbc-89e8-99a1f6ec12f5\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m2c4c" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.020181 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b63dfa9f-c0b4-41d2-9a30-10388f0ad077-encryption-config\") pod \"apiserver-76f77b778f-g67wl\" (UID: \"b63dfa9f-c0b4-41d2-9a30-10388f0ad077\") " pod="openshift-apiserver/apiserver-76f77b778f-g67wl" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.020218 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-kld95\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.020240 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ct4mv\" (UniqueName: \"kubernetes.io/projected/4c735838-6f43-4f60-9adb-4dcabc8f05c2-kube-api-access-ct4mv\") pod \"machine-approver-56656f9798-2mmsl\" (UID: \"4c735838-6f43-4f60-9adb-4dcabc8f05c2\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2mmsl" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.020259 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x78ct\" (UniqueName: \"kubernetes.io/projected/1b35d831-6af3-41e4-a111-ebfb9fefb029-kube-api-access-x78ct\") pod \"machine-api-operator-5694c8668f-kjvqd\" (UID: \"1b35d831-6af3-41e4-a111-ebfb9fefb029\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-kjvqd" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.020514 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/da405e0d-550f-42a2-8b4e-a387eabb8e0a-audit-policies\") pod \"oauth-openshift-558db77b4-kld95\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.020602 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b922af1e-a218-4fc7-8e32-cb3fc03d02e3-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-89565\" (UID: \"b922af1e-a218-4fc7-8e32-cb3fc03d02e3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-89565" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.020670 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/260e147b-2517-481c-93f4-3335794f5a1e-config\") pod \"authentication-operator-69f744f599-tcnkb\" (UID: \"260e147b-2517-481c-93f4-3335794f5a1e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tcnkb" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.020687 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/fe31ef3a-357d-4bbc-89e8-99a1f6ec12f5-image-registry-operator-tls\") 
pod \"cluster-image-registry-operator-dc59b4c8b-m2c4c\" (UID: \"fe31ef3a-357d-4bbc-89e8-99a1f6ec12f5\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m2c4c" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.020705 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b63dfa9f-c0b4-41d2-9a30-10388f0ad077-node-pullsecrets\") pod \"apiserver-76f77b778f-g67wl\" (UID: \"b63dfa9f-c0b4-41d2-9a30-10388f0ad077\") " pod="openshift-apiserver/apiserver-76f77b778f-g67wl" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.020755 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b922af1e-a218-4fc7-8e32-cb3fc03d02e3-etcd-client\") pod \"apiserver-7bbb656c7d-89565\" (UID: \"b922af1e-a218-4fc7-8e32-cb3fc03d02e3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-89565" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.020773 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t6pq5\" (UniqueName: \"kubernetes.io/projected/b922af1e-a218-4fc7-8e32-cb3fc03d02e3-kube-api-access-t6pq5\") pod \"apiserver-7bbb656c7d-89565\" (UID: \"b922af1e-a218-4fc7-8e32-cb3fc03d02e3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-89565" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.020820 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c735838-6f43-4f60-9adb-4dcabc8f05c2-config\") pod \"machine-approver-56656f9798-2mmsl\" (UID: \"4c735838-6f43-4f60-9adb-4dcabc8f05c2\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2mmsl" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.020835 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b63dfa9f-c0b4-41d2-9a30-10388f0ad077-config\") pod \"apiserver-76f77b778f-g67wl\" (UID: \"b63dfa9f-c0b4-41d2-9a30-10388f0ad077\") " pod="openshift-apiserver/apiserver-76f77b778f-g67wl" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.020849 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b63dfa9f-c0b4-41d2-9a30-10388f0ad077-etcd-client\") pod \"apiserver-76f77b778f-g67wl\" (UID: \"b63dfa9f-c0b4-41d2-9a30-10388f0ad077\") " pod="openshift-apiserver/apiserver-76f77b778f-g67wl" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.020916 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fnhqk\" (UniqueName: \"kubernetes.io/projected/e61ab90d-5502-48b1-9d9b-bb257fd3ac74-kube-api-access-fnhqk\") pod \"cluster-samples-operator-665b6dd947-rmq4p\" (UID: \"e61ab90d-5502-48b1-9d9b-bb257fd3ac74\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rmq4p" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.020936 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-kld95\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.020953 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xzk9j\" (UniqueName: \"kubernetes.io/projected/1eb80025-ed6a-4509-99ca-57f7a4c9eefb-kube-api-access-xzk9j\") pod \"route-controller-manager-6576b87f9c-b2lpc\" (UID: \"1eb80025-ed6a-4509-99ca-57f7a4c9eefb\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b2lpc" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.020970 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/e61ab90d-5502-48b1-9d9b-bb257fd3ac74-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-rmq4p\" (UID: \"e61ab90d-5502-48b1-9d9b-bb257fd3ac74\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rmq4p" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.023096 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-wfsr7"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.023840 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-q7vnr"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.024305 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2hkzk"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.024431 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-q7vnr" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.024520 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wfsr7" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.025288 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-wrh5b"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.025454 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2hkzk" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.026051 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rbpdj"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.026399 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ptn7m"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.026690 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-599vr"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.026752 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-wrh5b" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.026814 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ptn7m" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.026852 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rbpdj" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.027689 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hb74q"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.027962 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-txh4x"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.028251 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-shp28"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.028355 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-599vr" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.028531 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qbc5m"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.028615 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-shp28" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.028628 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-txh4x" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.028645 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hb74q" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.029301 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dfh8l"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.029478 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qbc5m" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.030012 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-df92s"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.030255 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dfh8l" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.030545 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fm4wv"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.030872 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fm4wv" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.032150 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-df92s" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.038363 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-89565"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.041295 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsxgh"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.060091 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-tcnkb"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.073929 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsxgh" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.081258 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f5pj2"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.081341 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-n8hdp"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.081564 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.087253 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-2drbk"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.087278 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.088420 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rmq4p"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.090255 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-wpt8s"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.090999 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-z2p7n"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.091534 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-z2p7n" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.091786 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-wpt8s" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.093494 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415615-7t8s6"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.094014 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-f2nct"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.094474 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-f2nct" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.094725 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-7t8s6" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.096499 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.099346 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.102033 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-bzfdn"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.102692 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m2c4c"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.102806 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-bzfdn" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.107340 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-zjhhs"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.107386 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-m7m44"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.107396 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-wfsr7"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.116257 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-kjvqd"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.117672 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ptn7m"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.119325 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-wrh5b"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.119957 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.122766 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-kld95"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.123238 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/663c4c9a-9738-4c49-9199-d2a18cd6d4be-service-ca\") pod \"console-f9d7485db-m7m44\" (UID: \"663c4c9a-9738-4c49-9199-d2a18cd6d4be\") " pod="openshift-console/console-f9d7485db-m7m44" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.123269 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2b8qn\" (UniqueName: \"kubernetes.io/projected/66b95a4b-b2d2-4823-b0c4-cbc8ae47213d-kube-api-access-2b8qn\") pod \"openshift-controller-manager-operator-756b6f6bc6-7pzsb\" (UID: \"66b95a4b-b2d2-4823-b0c4-cbc8ae47213d\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7pzsb" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.123298 4784 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b922af1e-a218-4fc7-8e32-cb3fc03d02e3-serving-cert\") pod \"apiserver-7bbb656c7d-89565\" (UID: \"b922af1e-a218-4fc7-8e32-cb3fc03d02e3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-89565" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.123323 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tvphx\" (UniqueName: \"kubernetes.io/projected/96b39f3e-b508-4f02-ae7a-d391eeca4988-kube-api-access-tvphx\") pod \"controller-manager-879f6c89f-2drbk\" (UID: \"96b39f3e-b508-4f02-ae7a-d391eeca4988\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2drbk" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.123349 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b63dfa9f-c0b4-41d2-9a30-10388f0ad077-trusted-ca-bundle\") pod \"apiserver-76f77b778f-g67wl\" (UID: \"b63dfa9f-c0b4-41d2-9a30-10388f0ad077\") " pod="openshift-apiserver/apiserver-76f77b778f-g67wl" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.123369 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-kld95\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.123389 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/fe31ef3a-357d-4bbc-89e8-99a1f6ec12f5-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-m2c4c\" (UID: \"fe31ef3a-357d-4bbc-89e8-99a1f6ec12f5\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m2c4c" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.123410 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/798fd230-84b4-4a83-98db-2e6fd780ca50-metrics-tls\") pod \"dns-operator-744455d44c-n8hdp\" (UID: \"798fd230-84b4-4a83-98db-2e6fd780ca50\") " pod="openshift-dns-operator/dns-operator-744455d44c-n8hdp" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.123434 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-kld95\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.123457 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ct4mv\" (UniqueName: \"kubernetes.io/projected/4c735838-6f43-4f60-9adb-4dcabc8f05c2-kube-api-access-ct4mv\") pod \"machine-approver-56656f9798-2mmsl\" (UID: \"4c735838-6f43-4f60-9adb-4dcabc8f05c2\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2mmsl" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.123477 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: 
\"kubernetes.io/secret/b63dfa9f-c0b4-41d2-9a30-10388f0ad077-encryption-config\") pod \"apiserver-76f77b778f-g67wl\" (UID: \"b63dfa9f-c0b4-41d2-9a30-10388f0ad077\") " pod="openshift-apiserver/apiserver-76f77b778f-g67wl" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.123500 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qtcc6\" (UniqueName: \"kubernetes.io/projected/798fd230-84b4-4a83-98db-2e6fd780ca50-kube-api-access-qtcc6\") pod \"dns-operator-744455d44c-n8hdp\" (UID: \"798fd230-84b4-4a83-98db-2e6fd780ca50\") " pod="openshift-dns-operator/dns-operator-744455d44c-n8hdp" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.123531 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x78ct\" (UniqueName: \"kubernetes.io/projected/1b35d831-6af3-41e4-a111-ebfb9fefb029-kube-api-access-x78ct\") pod \"machine-api-operator-5694c8668f-kjvqd\" (UID: \"1b35d831-6af3-41e4-a111-ebfb9fefb029\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-kjvqd" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.123557 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/da405e0d-550f-42a2-8b4e-a387eabb8e0a-audit-policies\") pod \"oauth-openshift-558db77b4-kld95\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.123581 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b922af1e-a218-4fc7-8e32-cb3fc03d02e3-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-89565\" (UID: \"b922af1e-a218-4fc7-8e32-cb3fc03d02e3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-89565" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.123603 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/66b95a4b-b2d2-4823-b0c4-cbc8ae47213d-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-7pzsb\" (UID: \"66b95a4b-b2d2-4823-b0c4-cbc8ae47213d\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7pzsb" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.123647 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/fe31ef3a-357d-4bbc-89e8-99a1f6ec12f5-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-m2c4c\" (UID: \"fe31ef3a-357d-4bbc-89e8-99a1f6ec12f5\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m2c4c" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.123670 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b63dfa9f-c0b4-41d2-9a30-10388f0ad077-node-pullsecrets\") pod \"apiserver-76f77b778f-g67wl\" (UID: \"b63dfa9f-c0b4-41d2-9a30-10388f0ad077\") " pod="openshift-apiserver/apiserver-76f77b778f-g67wl" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.123693 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/0fb47de9-a3dd-4b61-a8d7-a6d7150f601b-etcd-client\") pod \"etcd-operator-b45778765-rmjws\" (UID: 
\"0fb47de9-a3dd-4b61-a8d7-a6d7150f601b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-rmjws" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.123716 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b922af1e-a218-4fc7-8e32-cb3fc03d02e3-etcd-client\") pod \"apiserver-7bbb656c7d-89565\" (UID: \"b922af1e-a218-4fc7-8e32-cb3fc03d02e3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-89565" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.123737 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t6pq5\" (UniqueName: \"kubernetes.io/projected/b922af1e-a218-4fc7-8e32-cb3fc03d02e3-kube-api-access-t6pq5\") pod \"apiserver-7bbb656c7d-89565\" (UID: \"b922af1e-a218-4fc7-8e32-cb3fc03d02e3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-89565" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.123760 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/260e147b-2517-481c-93f4-3335794f5a1e-config\") pod \"authentication-operator-69f744f599-tcnkb\" (UID: \"260e147b-2517-481c-93f4-3335794f5a1e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tcnkb" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.123781 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b63dfa9f-c0b4-41d2-9a30-10388f0ad077-config\") pod \"apiserver-76f77b778f-g67wl\" (UID: \"b63dfa9f-c0b4-41d2-9a30-10388f0ad077\") " pod="openshift-apiserver/apiserver-76f77b778f-g67wl" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.123806 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b63dfa9f-c0b4-41d2-9a30-10388f0ad077-etcd-client\") pod \"apiserver-76f77b778f-g67wl\" (UID: \"b63dfa9f-c0b4-41d2-9a30-10388f0ad077\") " pod="openshift-apiserver/apiserver-76f77b778f-g67wl" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.123830 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fnhqk\" (UniqueName: \"kubernetes.io/projected/e61ab90d-5502-48b1-9d9b-bb257fd3ac74-kube-api-access-fnhqk\") pod \"cluster-samples-operator-665b6dd947-rmq4p\" (UID: \"e61ab90d-5502-48b1-9d9b-bb257fd3ac74\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rmq4p" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.123851 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c735838-6f43-4f60-9adb-4dcabc8f05c2-config\") pod \"machine-approver-56656f9798-2mmsl\" (UID: \"4c735838-6f43-4f60-9adb-4dcabc8f05c2\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2mmsl" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.123875 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-kld95\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.123904 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-xzk9j\" (UniqueName: \"kubernetes.io/projected/1eb80025-ed6a-4509-99ca-57f7a4c9eefb-kube-api-access-xzk9j\") pod \"route-controller-manager-6576b87f9c-b2lpc\" (UID: \"1eb80025-ed6a-4509-99ca-57f7a4c9eefb\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b2lpc" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.123930 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/e61ab90d-5502-48b1-9d9b-bb257fd3ac74-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-rmq4p\" (UID: \"e61ab90d-5502-48b1-9d9b-bb257fd3ac74\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rmq4p" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.123954 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zd7j6\" (UniqueName: \"kubernetes.io/projected/260e147b-2517-481c-93f4-3335794f5a1e-kube-api-access-zd7j6\") pod \"authentication-operator-69f744f599-tcnkb\" (UID: \"260e147b-2517-481c-93f4-3335794f5a1e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tcnkb" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.123977 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-kld95\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.124000 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/1b35d831-6af3-41e4-a111-ebfb9fefb029-images\") pod \"machine-api-operator-5694c8668f-kjvqd\" (UID: \"1b35d831-6af3-41e4-a111-ebfb9fefb029\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-kjvqd" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.124026 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/260e147b-2517-481c-93f4-3335794f5a1e-service-ca-bundle\") pod \"authentication-operator-69f744f599-tcnkb\" (UID: \"260e147b-2517-481c-93f4-3335794f5a1e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tcnkb" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.124052 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b192643d-cbd3-4289-8152-7a5e038f1a7e-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-f5pj2\" (UID: \"b192643d-cbd3-4289-8152-7a5e038f1a7e\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f5pj2" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.124075 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z2qqf\" (UniqueName: \"kubernetes.io/projected/fe31ef3a-357d-4bbc-89e8-99a1f6ec12f5-kube-api-access-z2qqf\") pod \"cluster-image-registry-operator-dc59b4c8b-m2c4c\" (UID: \"fe31ef3a-357d-4bbc-89e8-99a1f6ec12f5\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m2c4c" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.124102 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" 
(UniqueName: \"kubernetes.io/configmap/260e147b-2517-481c-93f4-3335794f5a1e-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-tcnkb\" (UID: \"260e147b-2517-481c-93f4-3335794f5a1e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tcnkb" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.124124 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/260e147b-2517-481c-93f4-3335794f5a1e-serving-cert\") pod \"authentication-operator-69f744f599-tcnkb\" (UID: \"260e147b-2517-481c-93f4-3335794f5a1e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tcnkb" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.124146 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b922af1e-a218-4fc7-8e32-cb3fc03d02e3-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-89565\" (UID: \"b922af1e-a218-4fc7-8e32-cb3fc03d02e3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-89565" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.124171 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w2895\" (UniqueName: \"kubernetes.io/projected/30b70430-7471-4b38-a1a2-22d557f5e1ca-kube-api-access-w2895\") pod \"downloads-7954f5f757-k7w2s\" (UID: \"30b70430-7471-4b38-a1a2-22d557f5e1ca\") " pod="openshift-console/downloads-7954f5f757-k7w2s" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.124216 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/663c4c9a-9738-4c49-9199-d2a18cd6d4be-console-oauth-config\") pod \"console-f9d7485db-m7m44\" (UID: \"663c4c9a-9738-4c49-9199-d2a18cd6d4be\") " pod="openshift-console/console-f9d7485db-m7m44" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.124244 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/663c4c9a-9738-4c49-9199-d2a18cd6d4be-trusted-ca-bundle\") pod \"console-f9d7485db-m7m44\" (UID: \"663c4c9a-9738-4c49-9199-d2a18cd6d4be\") " pod="openshift-console/console-f9d7485db-m7m44" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.124271 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/663c4c9a-9738-4c49-9199-d2a18cd6d4be-oauth-serving-cert\") pod \"console-f9d7485db-m7m44\" (UID: \"663c4c9a-9738-4c49-9199-d2a18cd6d4be\") " pod="openshift-console/console-f9d7485db-m7m44" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.124297 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/96b39f3e-b508-4f02-ae7a-d391eeca4988-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-2drbk\" (UID: \"96b39f3e-b508-4f02-ae7a-d391eeca4988\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2drbk" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.124322 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-kld95\" (UID: 
\"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.124343 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/96b39f3e-b508-4f02-ae7a-d391eeca4988-client-ca\") pod \"controller-manager-879f6c89f-2drbk\" (UID: \"96b39f3e-b508-4f02-ae7a-d391eeca4988\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2drbk" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.124362 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/da405e0d-550f-42a2-8b4e-a387eabb8e0a-audit-dir\") pod \"oauth-openshift-558db77b4-kld95\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.124385 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ph9g2\" (UniqueName: \"kubernetes.io/projected/0fb47de9-a3dd-4b61-a8d7-a6d7150f601b-kube-api-access-ph9g2\") pod \"etcd-operator-b45778765-rmjws\" (UID: \"0fb47de9-a3dd-4b61-a8d7-a6d7150f601b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-rmjws" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.124409 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1eb80025-ed6a-4509-99ca-57f7a4c9eefb-config\") pod \"route-controller-manager-6576b87f9c-b2lpc\" (UID: \"1eb80025-ed6a-4509-99ca-57f7a4c9eefb\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b2lpc" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.124429 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/96b39f3e-b508-4f02-ae7a-d391eeca4988-serving-cert\") pod \"controller-manager-879f6c89f-2drbk\" (UID: \"96b39f3e-b508-4f02-ae7a-d391eeca4988\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2drbk" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.124449 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/4c735838-6f43-4f60-9adb-4dcabc8f05c2-machine-approver-tls\") pod \"machine-approver-56656f9798-2mmsl\" (UID: \"4c735838-6f43-4f60-9adb-4dcabc8f05c2\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2mmsl" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.124471 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b63dfa9f-c0b4-41d2-9a30-10388f0ad077-etcd-serving-ca\") pod \"apiserver-76f77b778f-g67wl\" (UID: \"b63dfa9f-c0b4-41d2-9a30-10388f0ad077\") " pod="openshift-apiserver/apiserver-76f77b778f-g67wl" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.124496 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-kld95\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.124518 4784 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-kld95\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.124555 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0fb47de9-a3dd-4b61-a8d7-a6d7150f601b-serving-cert\") pod \"etcd-operator-b45778765-rmjws\" (UID: \"0fb47de9-a3dd-4b61-a8d7-a6d7150f601b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-rmjws" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.124580 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/0fb47de9-a3dd-4b61-a8d7-a6d7150f601b-etcd-service-ca\") pod \"etcd-operator-b45778765-rmjws\" (UID: \"0fb47de9-a3dd-4b61-a8d7-a6d7150f601b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-rmjws" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.124604 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b922af1e-a218-4fc7-8e32-cb3fc03d02e3-audit-dir\") pod \"apiserver-7bbb656c7d-89565\" (UID: \"b922af1e-a218-4fc7-8e32-cb3fc03d02e3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-89565" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.124628 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dbw6t\" (UniqueName: \"kubernetes.io/projected/663c4c9a-9738-4c49-9199-d2a18cd6d4be-kube-api-access-dbw6t\") pod \"console-f9d7485db-m7m44\" (UID: \"663c4c9a-9738-4c49-9199-d2a18cd6d4be\") " pod="openshift-console/console-f9d7485db-m7m44" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.124647 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k79jm\" (UniqueName: \"kubernetes.io/projected/da405e0d-550f-42a2-8b4e-a387eabb8e0a-kube-api-access-k79jm\") pod \"oauth-openshift-558db77b4-kld95\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.124665 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-kld95\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.124683 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b63dfa9f-c0b4-41d2-9a30-10388f0ad077-audit-dir\") pod \"apiserver-76f77b778f-g67wl\" (UID: \"b63dfa9f-c0b4-41d2-9a30-10388f0ad077\") " pod="openshift-apiserver/apiserver-76f77b778f-g67wl" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.124700 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/66b95a4b-b2d2-4823-b0c4-cbc8ae47213d-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-7pzsb\" (UID: \"66b95a4b-b2d2-4823-b0c4-cbc8ae47213d\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7pzsb" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.124718 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-kld95\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.124736 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fe31ef3a-357d-4bbc-89e8-99a1f6ec12f5-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-m2c4c\" (UID: \"fe31ef3a-357d-4bbc-89e8-99a1f6ec12f5\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m2c4c" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.124753 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/b63dfa9f-c0b4-41d2-9a30-10388f0ad077-image-import-ca\") pod \"apiserver-76f77b778f-g67wl\" (UID: \"b63dfa9f-c0b4-41d2-9a30-10388f0ad077\") " pod="openshift-apiserver/apiserver-76f77b778f-g67wl" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.124791 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/0fb47de9-a3dd-4b61-a8d7-a6d7150f601b-etcd-ca\") pod \"etcd-operator-b45778765-rmjws\" (UID: \"0fb47de9-a3dd-4b61-a8d7-a6d7150f601b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-rmjws" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.124818 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9hwm\" (UniqueName: \"kubernetes.io/projected/b192643d-cbd3-4289-8152-7a5e038f1a7e-kube-api-access-x9hwm\") pod \"openshift-apiserver-operator-796bbdcf4f-f5pj2\" (UID: \"b192643d-cbd3-4289-8152-7a5e038f1a7e\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f5pj2" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.124875 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/96b39f3e-b508-4f02-ae7a-d391eeca4988-config\") pod \"controller-manager-879f6c89f-2drbk\" (UID: \"96b39f3e-b508-4f02-ae7a-d391eeca4988\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2drbk" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.126675 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fe31ef3a-357d-4bbc-89e8-99a1f6ec12f5-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-m2c4c\" (UID: \"fe31ef3a-357d-4bbc-89e8-99a1f6ec12f5\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m2c4c" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.126938 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-599vr"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.127654 4784 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/b63dfa9f-c0b4-41d2-9a30-10388f0ad077-image-import-ca\") pod \"apiserver-76f77b778f-g67wl\" (UID: \"b63dfa9f-c0b4-41d2-9a30-10388f0ad077\") " pod="openshift-apiserver/apiserver-76f77b778f-g67wl" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.127703 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/1b35d831-6af3-41e4-a111-ebfb9fefb029-images\") pod \"machine-api-operator-5694c8668f-kjvqd\" (UID: \"1b35d831-6af3-41e4-a111-ebfb9fefb029\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-kjvqd" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.128293 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-kld95\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.128328 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b922af1e-a218-4fc7-8e32-cb3fc03d02e3-encryption-config\") pod \"apiserver-7bbb656c7d-89565\" (UID: \"b922af1e-a218-4fc7-8e32-cb3fc03d02e3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-89565" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.128358 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1b35d831-6af3-41e4-a111-ebfb9fefb029-config\") pod \"machine-api-operator-5694c8668f-kjvqd\" (UID: \"1b35d831-6af3-41e4-a111-ebfb9fefb029\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-kjvqd" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.128383 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4c735838-6f43-4f60-9adb-4dcabc8f05c2-auth-proxy-config\") pod \"machine-approver-56656f9798-2mmsl\" (UID: \"4c735838-6f43-4f60-9adb-4dcabc8f05c2\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2mmsl" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.128407 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/663c4c9a-9738-4c49-9199-d2a18cd6d4be-console-config\") pod \"console-f9d7485db-m7m44\" (UID: \"663c4c9a-9738-4c49-9199-d2a18cd6d4be\") " pod="openshift-console/console-f9d7485db-m7m44" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.128437 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/663c4c9a-9738-4c49-9199-d2a18cd6d4be-console-serving-cert\") pod \"console-f9d7485db-m7m44\" (UID: \"663c4c9a-9738-4c49-9199-d2a18cd6d4be\") " pod="openshift-console/console-f9d7485db-m7m44" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.128457 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1eb80025-ed6a-4509-99ca-57f7a4c9eefb-client-ca\") pod \"route-controller-manager-6576b87f9c-b2lpc\" (UID: \"1eb80025-ed6a-4509-99ca-57f7a4c9eefb\") " 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b2lpc" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.128474 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b192643d-cbd3-4289-8152-7a5e038f1a7e-config\") pod \"openshift-apiserver-operator-796bbdcf4f-f5pj2\" (UID: \"b192643d-cbd3-4289-8152-7a5e038f1a7e\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f5pj2" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.128492 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2fn9z\" (UniqueName: \"kubernetes.io/projected/b63dfa9f-c0b4-41d2-9a30-10388f0ad077-kube-api-access-2fn9z\") pod \"apiserver-76f77b778f-g67wl\" (UID: \"b63dfa9f-c0b4-41d2-9a30-10388f0ad077\") " pod="openshift-apiserver/apiserver-76f77b778f-g67wl" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.128510 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0fb47de9-a3dd-4b61-a8d7-a6d7150f601b-config\") pod \"etcd-operator-b45778765-rmjws\" (UID: \"0fb47de9-a3dd-4b61-a8d7-a6d7150f601b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-rmjws" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.128542 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/b63dfa9f-c0b4-41d2-9a30-10388f0ad077-audit\") pod \"apiserver-76f77b778f-g67wl\" (UID: \"b63dfa9f-c0b4-41d2-9a30-10388f0ad077\") " pod="openshift-apiserver/apiserver-76f77b778f-g67wl" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.128561 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-kld95\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.128579 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/1b35d831-6af3-41e4-a111-ebfb9fefb029-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-kjvqd\" (UID: \"1b35d831-6af3-41e4-a111-ebfb9fefb029\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-kjvqd" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.128595 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b922af1e-a218-4fc7-8e32-cb3fc03d02e3-audit-policies\") pod \"apiserver-7bbb656c7d-89565\" (UID: \"b922af1e-a218-4fc7-8e32-cb3fc03d02e3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-89565" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.128609 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b63dfa9f-c0b4-41d2-9a30-10388f0ad077-serving-cert\") pod \"apiserver-76f77b778f-g67wl\" (UID: \"b63dfa9f-c0b4-41d2-9a30-10388f0ad077\") " pod="openshift-apiserver/apiserver-76f77b778f-g67wl" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.128634 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/1eb80025-ed6a-4509-99ca-57f7a4c9eefb-serving-cert\") pod \"route-controller-manager-6576b87f9c-b2lpc\" (UID: \"1eb80025-ed6a-4509-99ca-57f7a4c9eefb\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b2lpc" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.129830 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1eb80025-ed6a-4509-99ca-57f7a4c9eefb-client-ca\") pod \"route-controller-manager-6576b87f9c-b2lpc\" (UID: \"1eb80025-ed6a-4509-99ca-57f7a4c9eefb\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b2lpc" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.130227 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b922af1e-a218-4fc7-8e32-cb3fc03d02e3-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-89565\" (UID: \"b922af1e-a218-4fc7-8e32-cb3fc03d02e3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-89565" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.130246 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-kld95\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.130635 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/da405e0d-550f-42a2-8b4e-a387eabb8e0a-audit-policies\") pod \"oauth-openshift-558db77b4-kld95\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.130906 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/96b39f3e-b508-4f02-ae7a-d391eeca4988-config\") pod \"controller-manager-879f6c89f-2drbk\" (UID: \"96b39f3e-b508-4f02-ae7a-d391eeca4988\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2drbk" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.131104 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b63dfa9f-c0b4-41d2-9a30-10388f0ad077-trusted-ca-bundle\") pod \"apiserver-76f77b778f-g67wl\" (UID: \"b63dfa9f-c0b4-41d2-9a30-10388f0ad077\") " pod="openshift-apiserver/apiserver-76f77b778f-g67wl" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.131695 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/260e147b-2517-481c-93f4-3335794f5a1e-service-ca-bundle\") pod \"authentication-operator-69f744f599-tcnkb\" (UID: \"260e147b-2517-481c-93f4-3335794f5a1e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tcnkb" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.131794 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/260e147b-2517-481c-93f4-3335794f5a1e-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-tcnkb\" (UID: \"260e147b-2517-481c-93f4-3335794f5a1e\") " 
pod="openshift-authentication-operator/authentication-operator-69f744f599-tcnkb" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.132517 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b63dfa9f-c0b4-41d2-9a30-10388f0ad077-node-pullsecrets\") pod \"apiserver-76f77b778f-g67wl\" (UID: \"b63dfa9f-c0b4-41d2-9a30-10388f0ad077\") " pod="openshift-apiserver/apiserver-76f77b778f-g67wl" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.132777 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/96b39f3e-b508-4f02-ae7a-d391eeca4988-client-ca\") pod \"controller-manager-879f6c89f-2drbk\" (UID: \"96b39f3e-b508-4f02-ae7a-d391eeca4988\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2drbk" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.132922 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/da405e0d-550f-42a2-8b4e-a387eabb8e0a-audit-dir\") pod \"oauth-openshift-558db77b4-kld95\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.132989 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/e61ab90d-5502-48b1-9d9b-bb257fd3ac74-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-rmq4p\" (UID: \"e61ab90d-5502-48b1-9d9b-bb257fd3ac74\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rmq4p" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.133408 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-kld95\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.133623 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b922af1e-a218-4fc7-8e32-cb3fc03d02e3-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-89565\" (UID: \"b922af1e-a218-4fc7-8e32-cb3fc03d02e3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-89565" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.134493 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b192643d-cbd3-4289-8152-7a5e038f1a7e-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-f5pj2\" (UID: \"b192643d-cbd3-4289-8152-7a5e038f1a7e\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f5pj2" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.134909 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1eb80025-ed6a-4509-99ca-57f7a4c9eefb-config\") pod \"route-controller-manager-6576b87f9c-b2lpc\" (UID: \"1eb80025-ed6a-4509-99ca-57f7a4c9eefb\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b2lpc" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.134911 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/b922af1e-a218-4fc7-8e32-cb3fc03d02e3-serving-cert\") pod \"apiserver-7bbb656c7d-89565\" (UID: \"b922af1e-a218-4fc7-8e32-cb3fc03d02e3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-89565" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.135641 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b192643d-cbd3-4289-8152-7a5e038f1a7e-config\") pod \"openshift-apiserver-operator-796bbdcf4f-f5pj2\" (UID: \"b192643d-cbd3-4289-8152-7a5e038f1a7e\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f5pj2" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.136386 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/96b39f3e-b508-4f02-ae7a-d391eeca4988-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-2drbk\" (UID: \"96b39f3e-b508-4f02-ae7a-d391eeca4988\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2drbk" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.136437 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c735838-6f43-4f60-9adb-4dcabc8f05c2-config\") pod \"machine-approver-56656f9798-2mmsl\" (UID: \"4c735838-6f43-4f60-9adb-4dcabc8f05c2\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2mmsl" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.136540 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/260e147b-2517-481c-93f4-3335794f5a1e-serving-cert\") pod \"authentication-operator-69f744f599-tcnkb\" (UID: \"260e147b-2517-481c-93f4-3335794f5a1e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tcnkb" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.137247 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b63dfa9f-c0b4-41d2-9a30-10388f0ad077-etcd-serving-ca\") pod \"apiserver-76f77b778f-g67wl\" (UID: \"b63dfa9f-c0b4-41d2-9a30-10388f0ad077\") " pod="openshift-apiserver/apiserver-76f77b778f-g67wl" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.138588 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/b63dfa9f-c0b4-41d2-9a30-10388f0ad077-audit\") pod \"apiserver-76f77b778f-g67wl\" (UID: \"b63dfa9f-c0b4-41d2-9a30-10388f0ad077\") " pod="openshift-apiserver/apiserver-76f77b778f-g67wl" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.139110 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-kld95\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.139167 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b63dfa9f-c0b4-41d2-9a30-10388f0ad077-audit-dir\") pod \"apiserver-76f77b778f-g67wl\" (UID: \"b63dfa9f-c0b4-41d2-9a30-10388f0ad077\") " pod="openshift-apiserver/apiserver-76f77b778f-g67wl" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.139222 4784 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7pzsb"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.139808 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b63dfa9f-c0b4-41d2-9a30-10388f0ad077-encryption-config\") pod \"apiserver-76f77b778f-g67wl\" (UID: \"b63dfa9f-c0b4-41d2-9a30-10388f0ad077\") " pod="openshift-apiserver/apiserver-76f77b778f-g67wl" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.139840 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-kld95\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.140229 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/4c735838-6f43-4f60-9adb-4dcabc8f05c2-machine-approver-tls\") pod \"machine-approver-56656f9798-2mmsl\" (UID: \"4c735838-6f43-4f60-9adb-4dcabc8f05c2\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2mmsl" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.140426 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-kld95\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.140433 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-kld95\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.140591 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b922af1e-a218-4fc7-8e32-cb3fc03d02e3-audit-dir\") pod \"apiserver-7bbb656c7d-89565\" (UID: \"b922af1e-a218-4fc7-8e32-cb3fc03d02e3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-89565" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.141306 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/fe31ef3a-357d-4bbc-89e8-99a1f6ec12f5-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-m2c4c\" (UID: \"fe31ef3a-357d-4bbc-89e8-99a1f6ec12f5\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m2c4c" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.141547 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/96b39f3e-b508-4f02-ae7a-d391eeca4988-serving-cert\") pod \"controller-manager-879f6c89f-2drbk\" (UID: \"96b39f3e-b508-4f02-ae7a-d391eeca4988\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2drbk" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.141656 
4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-kld95\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.142288 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/260e147b-2517-481c-93f4-3335794f5a1e-config\") pod \"authentication-operator-69f744f599-tcnkb\" (UID: \"260e147b-2517-481c-93f4-3335794f5a1e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tcnkb" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.142432 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1b35d831-6af3-41e4-a111-ebfb9fefb029-config\") pod \"machine-api-operator-5694c8668f-kjvqd\" (UID: \"1b35d831-6af3-41e4-a111-ebfb9fefb029\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-kjvqd" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.143063 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-df92s"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.143354 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4c735838-6f43-4f60-9adb-4dcabc8f05c2-auth-proxy-config\") pod \"machine-approver-56656f9798-2mmsl\" (UID: \"4c735838-6f43-4f60-9adb-4dcabc8f05c2\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2mmsl" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.145813 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b922af1e-a218-4fc7-8e32-cb3fc03d02e3-audit-policies\") pod \"apiserver-7bbb656c7d-89565\" (UID: \"b922af1e-a218-4fc7-8e32-cb3fc03d02e3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-89565" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.146756 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-kld95\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.146944 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1eb80025-ed6a-4509-99ca-57f7a4c9eefb-serving-cert\") pod \"route-controller-manager-6576b87f9c-b2lpc\" (UID: \"1eb80025-ed6a-4509-99ca-57f7a4c9eefb\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b2lpc" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.147050 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b922af1e-a218-4fc7-8e32-cb3fc03d02e3-encryption-config\") pod \"apiserver-7bbb656c7d-89565\" (UID: \"b922af1e-a218-4fc7-8e32-cb3fc03d02e3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-89565" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.147156 4784 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b63dfa9f-c0b4-41d2-9a30-10388f0ad077-etcd-client\") pod \"apiserver-76f77b778f-g67wl\" (UID: \"b63dfa9f-c0b4-41d2-9a30-10388f0ad077\") " pod="openshift-apiserver/apiserver-76f77b778f-g67wl" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.147940 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b922af1e-a218-4fc7-8e32-cb3fc03d02e3-etcd-client\") pod \"apiserver-7bbb656c7d-89565\" (UID: \"b922af1e-a218-4fc7-8e32-cb3fc03d02e3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-89565" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.148165 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-kld95\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.148170 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/1b35d831-6af3-41e4-a111-ebfb9fefb029-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-kjvqd\" (UID: \"1b35d831-6af3-41e4-a111-ebfb9fefb029\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-kjvqd" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.148249 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.148888 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-kld95\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.149689 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b63dfa9f-c0b4-41d2-9a30-10388f0ad077-config\") pod \"apiserver-76f77b778f-g67wl\" (UID: \"b63dfa9f-c0b4-41d2-9a30-10388f0ad077\") " pod="openshift-apiserver/apiserver-76f77b778f-g67wl" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.149800 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-kld95\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.149906 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b63dfa9f-c0b4-41d2-9a30-10388f0ad077-serving-cert\") pod \"apiserver-76f77b778f-g67wl\" (UID: \"b63dfa9f-c0b4-41d2-9a30-10388f0ad077\") " pod="openshift-apiserver/apiserver-76f77b778f-g67wl" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.150722 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hb74q"] Dec 05 12:27:45 crc 
kubenswrapper[4784]: I1205 12:27:45.153018 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-fvbbf"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.154465 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-k7w2s"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.155094 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.155655 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-c9str"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.157105 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-rmjws"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.158318 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fm4wv"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.159493 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dfh8l"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.160881 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rbpdj"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.162171 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2hkzk"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.163660 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-shp28"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.165035 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-b2lpc"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.166088 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-txh4x"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.167245 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415615-7t8s6"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.168311 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-q7vnr"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.169387 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-2c9tf"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.170385 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-2c9tf" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.170726 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-l8jkp"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.172328 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-f2nct"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.172529 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-l8jkp" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.173360 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsxgh"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.174830 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qbc5m"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.175842 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.176798 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-z2p7n"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.180954 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-l8jkp"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.189696 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-bzfdn"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.194759 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-tbmvb"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.196007 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-tbmvb" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.196410 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.196937 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-tbmvb"] Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.214967 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.229170 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/798fd230-84b4-4a83-98db-2e6fd780ca50-metrics-tls\") pod \"dns-operator-744455d44c-n8hdp\" (UID: \"798fd230-84b4-4a83-98db-2e6fd780ca50\") " pod="openshift-dns-operator/dns-operator-744455d44c-n8hdp" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.229254 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qtcc6\" (UniqueName: \"kubernetes.io/projected/798fd230-84b4-4a83-98db-2e6fd780ca50-kube-api-access-qtcc6\") pod \"dns-operator-744455d44c-n8hdp\" (UID: \"798fd230-84b4-4a83-98db-2e6fd780ca50\") " pod="openshift-dns-operator/dns-operator-744455d44c-n8hdp" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.229296 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/66b95a4b-b2d2-4823-b0c4-cbc8ae47213d-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-7pzsb\" (UID: \"66b95a4b-b2d2-4823-b0c4-cbc8ae47213d\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7pzsb" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.229322 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: 
\"kubernetes.io/secret/0fb47de9-a3dd-4b61-a8d7-a6d7150f601b-etcd-client\") pod \"etcd-operator-b45778765-rmjws\" (UID: \"0fb47de9-a3dd-4b61-a8d7-a6d7150f601b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-rmjws" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.229374 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/663c4c9a-9738-4c49-9199-d2a18cd6d4be-console-oauth-config\") pod \"console-f9d7485db-m7m44\" (UID: \"663c4c9a-9738-4c49-9199-d2a18cd6d4be\") " pod="openshift-console/console-f9d7485db-m7m44" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.229391 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/663c4c9a-9738-4c49-9199-d2a18cd6d4be-trusted-ca-bundle\") pod \"console-f9d7485db-m7m44\" (UID: \"663c4c9a-9738-4c49-9199-d2a18cd6d4be\") " pod="openshift-console/console-f9d7485db-m7m44" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.229410 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/663c4c9a-9738-4c49-9199-d2a18cd6d4be-oauth-serving-cert\") pod \"console-f9d7485db-m7m44\" (UID: \"663c4c9a-9738-4c49-9199-d2a18cd6d4be\") " pod="openshift-console/console-f9d7485db-m7m44" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.229427 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ph9g2\" (UniqueName: \"kubernetes.io/projected/0fb47de9-a3dd-4b61-a8d7-a6d7150f601b-kube-api-access-ph9g2\") pod \"etcd-operator-b45778765-rmjws\" (UID: \"0fb47de9-a3dd-4b61-a8d7-a6d7150f601b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-rmjws" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.229446 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0fb47de9-a3dd-4b61-a8d7-a6d7150f601b-serving-cert\") pod \"etcd-operator-b45778765-rmjws\" (UID: \"0fb47de9-a3dd-4b61-a8d7-a6d7150f601b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-rmjws" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.229463 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dbw6t\" (UniqueName: \"kubernetes.io/projected/663c4c9a-9738-4c49-9199-d2a18cd6d4be-kube-api-access-dbw6t\") pod \"console-f9d7485db-m7m44\" (UID: \"663c4c9a-9738-4c49-9199-d2a18cd6d4be\") " pod="openshift-console/console-f9d7485db-m7m44" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.229478 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/0fb47de9-a3dd-4b61-a8d7-a6d7150f601b-etcd-service-ca\") pod \"etcd-operator-b45778765-rmjws\" (UID: \"0fb47de9-a3dd-4b61-a8d7-a6d7150f601b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-rmjws" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.229503 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/66b95a4b-b2d2-4823-b0c4-cbc8ae47213d-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-7pzsb\" (UID: \"66b95a4b-b2d2-4823-b0c4-cbc8ae47213d\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7pzsb" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 
12:27:45.229526 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/0fb47de9-a3dd-4b61-a8d7-a6d7150f601b-etcd-ca\") pod \"etcd-operator-b45778765-rmjws\" (UID: \"0fb47de9-a3dd-4b61-a8d7-a6d7150f601b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-rmjws" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.229542 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/663c4c9a-9738-4c49-9199-d2a18cd6d4be-console-config\") pod \"console-f9d7485db-m7m44\" (UID: \"663c4c9a-9738-4c49-9199-d2a18cd6d4be\") " pod="openshift-console/console-f9d7485db-m7m44" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.229566 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/663c4c9a-9738-4c49-9199-d2a18cd6d4be-console-serving-cert\") pod \"console-f9d7485db-m7m44\" (UID: \"663c4c9a-9738-4c49-9199-d2a18cd6d4be\") " pod="openshift-console/console-f9d7485db-m7m44" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.229595 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0fb47de9-a3dd-4b61-a8d7-a6d7150f601b-config\") pod \"etcd-operator-b45778765-rmjws\" (UID: \"0fb47de9-a3dd-4b61-a8d7-a6d7150f601b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-rmjws" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.229629 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/663c4c9a-9738-4c49-9199-d2a18cd6d4be-service-ca\") pod \"console-f9d7485db-m7m44\" (UID: \"663c4c9a-9738-4c49-9199-d2a18cd6d4be\") " pod="openshift-console/console-f9d7485db-m7m44" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.229656 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2b8qn\" (UniqueName: \"kubernetes.io/projected/66b95a4b-b2d2-4823-b0c4-cbc8ae47213d-kube-api-access-2b8qn\") pod \"openshift-controller-manager-operator-756b6f6bc6-7pzsb\" (UID: \"66b95a4b-b2d2-4823-b0c4-cbc8ae47213d\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7pzsb" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.230516 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/0fb47de9-a3dd-4b61-a8d7-a6d7150f601b-etcd-service-ca\") pod \"etcd-operator-b45778765-rmjws\" (UID: \"0fb47de9-a3dd-4b61-a8d7-a6d7150f601b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-rmjws" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.230905 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/663c4c9a-9738-4c49-9199-d2a18cd6d4be-oauth-serving-cert\") pod \"console-f9d7485db-m7m44\" (UID: \"663c4c9a-9738-4c49-9199-d2a18cd6d4be\") " pod="openshift-console/console-f9d7485db-m7m44" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.230918 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/0fb47de9-a3dd-4b61-a8d7-a6d7150f601b-etcd-ca\") pod \"etcd-operator-b45778765-rmjws\" (UID: \"0fb47de9-a3dd-4b61-a8d7-a6d7150f601b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-rmjws" Dec 05 
12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.231037 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/663c4c9a-9738-4c49-9199-d2a18cd6d4be-trusted-ca-bundle\") pod \"console-f9d7485db-m7m44\" (UID: \"663c4c9a-9738-4c49-9199-d2a18cd6d4be\") " pod="openshift-console/console-f9d7485db-m7m44" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.231250 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/663c4c9a-9738-4c49-9199-d2a18cd6d4be-console-config\") pod \"console-f9d7485db-m7m44\" (UID: \"663c4c9a-9738-4c49-9199-d2a18cd6d4be\") " pod="openshift-console/console-f9d7485db-m7m44" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.231376 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0fb47de9-a3dd-4b61-a8d7-a6d7150f601b-config\") pod \"etcd-operator-b45778765-rmjws\" (UID: \"0fb47de9-a3dd-4b61-a8d7-a6d7150f601b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-rmjws" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.231791 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/663c4c9a-9738-4c49-9199-d2a18cd6d4be-service-ca\") pod \"console-f9d7485db-m7m44\" (UID: \"663c4c9a-9738-4c49-9199-d2a18cd6d4be\") " pod="openshift-console/console-f9d7485db-m7m44" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.232129 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/798fd230-84b4-4a83-98db-2e6fd780ca50-metrics-tls\") pod \"dns-operator-744455d44c-n8hdp\" (UID: \"798fd230-84b4-4a83-98db-2e6fd780ca50\") " pod="openshift-dns-operator/dns-operator-744455d44c-n8hdp" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.232633 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0fb47de9-a3dd-4b61-a8d7-a6d7150f601b-serving-cert\") pod \"etcd-operator-b45778765-rmjws\" (UID: \"0fb47de9-a3dd-4b61-a8d7-a6d7150f601b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-rmjws" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.233050 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/663c4c9a-9738-4c49-9199-d2a18cd6d4be-console-oauth-config\") pod \"console-f9d7485db-m7m44\" (UID: \"663c4c9a-9738-4c49-9199-d2a18cd6d4be\") " pod="openshift-console/console-f9d7485db-m7m44" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.235465 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.235791 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/663c4c9a-9738-4c49-9199-d2a18cd6d4be-console-serving-cert\") pod \"console-f9d7485db-m7m44\" (UID: \"663c4c9a-9738-4c49-9199-d2a18cd6d4be\") " pod="openshift-console/console-f9d7485db-m7m44" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.237320 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/0fb47de9-a3dd-4b61-a8d7-a6d7150f601b-etcd-client\") pod \"etcd-operator-b45778765-rmjws\" (UID: 
\"0fb47de9-a3dd-4b61-a8d7-a6d7150f601b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-rmjws" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.255576 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.276100 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.284537 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/66b95a4b-b2d2-4823-b0c4-cbc8ae47213d-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-7pzsb\" (UID: \"66b95a4b-b2d2-4823-b0c4-cbc8ae47213d\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7pzsb" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.295965 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.300405 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/66b95a4b-b2d2-4823-b0c4-cbc8ae47213d-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-7pzsb\" (UID: \"66b95a4b-b2d2-4823-b0c4-cbc8ae47213d\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7pzsb" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.315291 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.374750 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.395935 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.415483 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.439562 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.457862 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.476180 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.495166 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.515825 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.535762 4784 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.556002 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.575898 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.595917 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.615528 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.635742 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.656266 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.676017 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.696772 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.715977 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.735349 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.755535 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.775979 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.797235 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.816491 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.836443 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.855278 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.878937 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" 
Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.894891 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.916445 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.944480 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.955020 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.976311 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 05 12:27:45 crc kubenswrapper[4784]: I1205 12:27:45.996845 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.015872 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.033777 4784 request.go:700] Waited for 1.004775699s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager-operator/secrets?fieldSelector=metadata.name%3Dkube-controller-manager-operator-dockercfg-gkqpw&limit=500&resourceVersion=0 Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.036441 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.055742 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.076370 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.096923 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.118268 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.136602 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.156178 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.175768 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.196513 4784 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.215612 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.239121 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.257329 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.276113 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.296707 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.316025 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.337358 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.356930 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.376395 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.395977 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.414785 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.436056 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.456130 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.475392 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.495483 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.515775 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.535444 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.555557 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.576764 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 05 12:27:46 crc kubenswrapper[4784]: 
I1205 12:27:46.596101 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.615798 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.636031 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.655017 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.675212 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.696089 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.715944 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.735112 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.772479 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xzk9j\" (UniqueName: \"kubernetes.io/projected/1eb80025-ed6a-4509-99ca-57f7a4c9eefb-kube-api-access-xzk9j\") pod \"route-controller-manager-6576b87f9c-b2lpc\" (UID: \"1eb80025-ed6a-4509-99ca-57f7a4c9eefb\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b2lpc" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.789989 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9hwm\" (UniqueName: \"kubernetes.io/projected/b192643d-cbd3-4289-8152-7a5e038f1a7e-kube-api-access-x9hwm\") pod \"openshift-apiserver-operator-796bbdcf4f-f5pj2\" (UID: \"b192643d-cbd3-4289-8152-7a5e038f1a7e\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f5pj2" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.817254 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zd7j6\" (UniqueName: \"kubernetes.io/projected/260e147b-2517-481c-93f4-3335794f5a1e-kube-api-access-zd7j6\") pod \"authentication-operator-69f744f599-tcnkb\" (UID: \"260e147b-2517-481c-93f4-3335794f5a1e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tcnkb" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.830835 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z2qqf\" (UniqueName: \"kubernetes.io/projected/fe31ef3a-357d-4bbc-89e8-99a1f6ec12f5-kube-api-access-z2qqf\") pod \"cluster-image-registry-operator-dc59b4c8b-m2c4c\" (UID: \"fe31ef3a-357d-4bbc-89e8-99a1f6ec12f5\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m2c4c" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.849437 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tvphx\" (UniqueName: \"kubernetes.io/projected/96b39f3e-b508-4f02-ae7a-d391eeca4988-kube-api-access-tvphx\") pod 
\"controller-manager-879f6c89f-2drbk\" (UID: \"96b39f3e-b508-4f02-ae7a-d391eeca4988\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2drbk" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.849762 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f5pj2" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.858042 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:27:46 crc kubenswrapper[4784]: E1205 12:27:46.858369 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:29:48.858332885 +0000 UTC m=+268.278399700 (durationBeforeRetry 2m2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.869821 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/fe31ef3a-357d-4bbc-89e8-99a1f6ec12f5-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-m2c4c\" (UID: \"fe31ef3a-357d-4bbc-89e8-99a1f6ec12f5\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m2c4c" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.890100 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x78ct\" (UniqueName: \"kubernetes.io/projected/1b35d831-6af3-41e4-a111-ebfb9fefb029-kube-api-access-x78ct\") pod \"machine-api-operator-5694c8668f-kjvqd\" (UID: \"1b35d831-6af3-41e4-a111-ebfb9fefb029\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-kjvqd" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.897270 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-tcnkb" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.910803 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w2895\" (UniqueName: \"kubernetes.io/projected/30b70430-7471-4b38-a1a2-22d557f5e1ca-kube-api-access-w2895\") pod \"downloads-7954f5f757-k7w2s\" (UID: \"30b70430-7471-4b38-a1a2-22d557f5e1ca\") " pod="openshift-console/downloads-7954f5f757-k7w2s" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.920578 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m2c4c" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.932288 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ct4mv\" (UniqueName: \"kubernetes.io/projected/4c735838-6f43-4f60-9adb-4dcabc8f05c2-kube-api-access-ct4mv\") pod \"machine-approver-56656f9798-2mmsl\" (UID: \"4c735838-6f43-4f60-9adb-4dcabc8f05c2\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2mmsl" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.960944 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.961036 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.961068 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.961066 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2fn9z\" (UniqueName: \"kubernetes.io/projected/b63dfa9f-c0b4-41d2-9a30-10388f0ad077-kube-api-access-2fn9z\") pod \"apiserver-76f77b778f-g67wl\" (UID: \"b63dfa9f-c0b4-41d2-9a30-10388f0ad077\") " pod="openshift-apiserver/apiserver-76f77b778f-g67wl" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.961093 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.963340 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.965433 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:27:46 crc kubenswrapper[4784]: 
I1205 12:27:46.966082 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.968556 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.974224 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fnhqk\" (UniqueName: \"kubernetes.io/projected/e61ab90d-5502-48b1-9d9b-bb257fd3ac74-kube-api-access-fnhqk\") pod \"cluster-samples-operator-665b6dd947-rmq4p\" (UID: \"e61ab90d-5502-48b1-9d9b-bb257fd3ac74\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rmq4p" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.981968 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-kjvqd" Dec 05 12:27:46 crc kubenswrapper[4784]: I1205 12:27:46.995980 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t6pq5\" (UniqueName: \"kubernetes.io/projected/b922af1e-a218-4fc7-8e32-cb3fc03d02e3-kube-api-access-t6pq5\") pod \"apiserver-7bbb656c7d-89565\" (UID: \"b922af1e-a218-4fc7-8e32-cb3fc03d02e3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-89565" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.000448 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2mmsl" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.004545 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b2lpc" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.012720 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.012946 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k79jm\" (UniqueName: \"kubernetes.io/projected/da405e0d-550f-42a2-8b4e-a387eabb8e0a-kube-api-access-k79jm\") pod \"oauth-openshift-558db77b4-kld95\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:27:47 crc kubenswrapper[4784]: W1205 12:27:47.013581 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4c735838_6f43_4f60_9adb_4dcabc8f05c2.slice/crio-ce7c23fe818d7448c4b29455a9655d15a5e56f2f43546193b2437998c0ce49c8 WatchSource:0}: Error finding container ce7c23fe818d7448c4b29455a9655d15a5e56f2f43546193b2437998c0ce49c8: Status 404 returned error can't find the container with id ce7c23fe818d7448c4b29455a9655d15a5e56f2f43546193b2437998c0ce49c8 Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.016038 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.025919 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-g67wl" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.034358 4784 request.go:700] Waited for 1.863598462s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/secrets?fieldSelector=metadata.name%3Dnode-bootstrapper-token&limit=500&resourceVersion=0 Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.036779 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.039214 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-89565" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.059543 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.078087 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.079707 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f5pj2"] Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.095714 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.097392 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-2drbk" Dec 05 12:27:47 crc kubenswrapper[4784]: W1205 12:27:47.101217 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb192643d_cbd3_4289_8152_7a5e038f1a7e.slice/crio-260e637181ae9e3f10725ed2d7cb5848ca2f365449979ed7b10e51df5be41727 WatchSource:0}: Error finding container 260e637181ae9e3f10725ed2d7cb5848ca2f365449979ed7b10e51df5be41727: Status 404 returned error can't find the container with id 260e637181ae9e3f10725ed2d7cb5848ca2f365449979ed7b10e51df5be41727 Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.116248 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.116597 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rmq4p" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.127261 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-k7w2s" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.146935 4784 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.148643 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-tcnkb"] Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.157245 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.175406 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.182958 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.203938 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m2c4c"] Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.219792 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qtcc6\" (UniqueName: \"kubernetes.io/projected/798fd230-84b4-4a83-98db-2e6fd780ca50-kube-api-access-qtcc6\") pod \"dns-operator-744455d44c-n8hdp\" (UID: \"798fd230-84b4-4a83-98db-2e6fd780ca50\") " pod="openshift-dns-operator/dns-operator-744455d44c-n8hdp" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.221381 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.230581 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.232114 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-kjvqd"] Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.235717 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2b8qn\" (UniqueName: \"kubernetes.io/projected/66b95a4b-b2d2-4823-b0c4-cbc8ae47213d-kube-api-access-2b8qn\") pod \"openshift-controller-manager-operator-756b6f6bc6-7pzsb\" (UID: \"66b95a4b-b2d2-4823-b0c4-cbc8ae47213d\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7pzsb" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.282358 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-b2lpc"] Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.291490 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-n8hdp" Dec 05 12:27:47 crc kubenswrapper[4784]: W1205 12:27:47.291776 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1b35d831_6af3_41e4_a111_ebfb9fefb029.slice/crio-ad6f6259a371324c8a8317d0fded311009b7d62f66e919634743c137898f1b62 WatchSource:0}: Error finding container ad6f6259a371324c8a8317d0fded311009b7d62f66e919634743c137898f1b62: Status 404 returned error can't find the container with id ad6f6259a371324c8a8317d0fded311009b7d62f66e919634743c137898f1b62 Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.291999 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ph9g2\" (UniqueName: \"kubernetes.io/projected/0fb47de9-a3dd-4b61-a8d7-a6d7150f601b-kube-api-access-ph9g2\") pod \"etcd-operator-b45778765-rmjws\" (UID: \"0fb47de9-a3dd-4b61-a8d7-a6d7150f601b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-rmjws" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.294353 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dbw6t\" (UniqueName: \"kubernetes.io/projected/663c4c9a-9738-4c49-9199-d2a18cd6d4be-kube-api-access-dbw6t\") pod \"console-f9d7485db-m7m44\" (UID: \"663c4c9a-9738-4c49-9199-d2a18cd6d4be\") " pod="openshift-console/console-f9d7485db-m7m44" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.306155 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-m7m44" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.324369 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-rmjws" Dec 05 12:27:47 crc kubenswrapper[4784]: W1205 12:27:47.331065 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1eb80025_ed6a_4509_99ca_57f7a4c9eefb.slice/crio-0a401e36ff269ba3931b5640e4c6546ed34f39dd1e5f7ab44ee79f7884ddf59f WatchSource:0}: Error finding container 0a401e36ff269ba3931b5640e4c6546ed34f39dd1e5f7ab44ee79f7884ddf59f: Status 404 returned error can't find the container with id 0a401e36ff269ba3931b5640e4c6546ed34f39dd1e5f7ab44ee79f7884ddf59f Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.365841 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7pzsb" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.367421 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f61c795f-92e7-4eb0-8cf0-dcd934749576-trusted-ca\") pod \"console-operator-58897d9998-fvbbf\" (UID: \"f61c795f-92e7-4eb0-8cf0-dcd934749576\") " pod="openshift-console-operator/console-operator-58897d9998-fvbbf" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.367456 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4lqtj\" (UniqueName: \"kubernetes.io/projected/2a6d9550-f59e-4abd-adcd-6514e8c143bc-kube-api-access-4lqtj\") pod \"ingress-operator-5b745b69d9-c9str\" (UID: \"2a6d9550-f59e-4abd-adcd-6514e8c143bc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-c9str" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.367511 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ec9447bc-e76f-4943-9f80-f4d121ff1322-registry-certificates\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.367534 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2a6d9550-f59e-4abd-adcd-6514e8c143bc-trusted-ca\") pod \"ingress-operator-5b745b69d9-c9str\" (UID: \"2a6d9550-f59e-4abd-adcd-6514e8c143bc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-c9str" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.367639 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jfv4q\" (UniqueName: \"kubernetes.io/projected/ec9447bc-e76f-4943-9f80-f4d121ff1322-kube-api-access-jfv4q\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.367675 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ec9447bc-e76f-4943-9f80-f4d121ff1322-registry-tls\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.367754 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.367805 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ec9447bc-e76f-4943-9f80-f4d121ff1322-ca-trust-extracted\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: 
\"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.367827 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ec9447bc-e76f-4943-9f80-f4d121ff1322-trusted-ca\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.367876 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f61c795f-92e7-4eb0-8cf0-dcd934749576-serving-cert\") pod \"console-operator-58897d9998-fvbbf\" (UID: \"f61c795f-92e7-4eb0-8cf0-dcd934749576\") " pod="openshift-console-operator/console-operator-58897d9998-fvbbf" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.368149 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ec9447bc-e76f-4943-9f80-f4d121ff1322-bound-sa-token\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.368210 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f61c795f-92e7-4eb0-8cf0-dcd934749576-config\") pod \"console-operator-58897d9998-fvbbf\" (UID: \"f61c795f-92e7-4eb0-8cf0-dcd934749576\") " pod="openshift-console-operator/console-operator-58897d9998-fvbbf" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.368259 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/2a6d9550-f59e-4abd-adcd-6514e8c143bc-metrics-tls\") pod \"ingress-operator-5b745b69d9-c9str\" (UID: \"2a6d9550-f59e-4abd-adcd-6514e8c143bc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-c9str" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.368877 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ec9447bc-e76f-4943-9f80-f4d121ff1322-installation-pull-secrets\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.368959 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2a6d9550-f59e-4abd-adcd-6514e8c143bc-bound-sa-token\") pod \"ingress-operator-5b745b69d9-c9str\" (UID: \"2a6d9550-f59e-4abd-adcd-6514e8c143bc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-c9str" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.368992 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r7fvv\" (UniqueName: \"kubernetes.io/projected/f61c795f-92e7-4eb0-8cf0-dcd934749576-kube-api-access-r7fvv\") pod \"console-operator-58897d9998-fvbbf\" (UID: \"f61c795f-92e7-4eb0-8cf0-dcd934749576\") " 
pod="openshift-console-operator/console-operator-58897d9998-fvbbf" Dec 05 12:27:47 crc kubenswrapper[4784]: E1205 12:27:47.370877 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:27:47.870862498 +0000 UTC m=+147.290929313 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.414690 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-89565"] Dec 05 12:27:47 crc kubenswrapper[4784]: W1205 12:27:47.447800 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb922af1e_a218_4fc7_8e32_cb3fc03d02e3.slice/crio-5e4b258ab352a5cd5cb144462900e3c3e1931c51162ace4f4b83d059896afb09 WatchSource:0}: Error finding container 5e4b258ab352a5cd5cb144462900e3c3e1931c51162ace4f4b83d059896afb09: Status 404 returned error can't find the container with id 5e4b258ab352a5cd5cb144462900e3c3e1931c51162ace4f4b83d059896afb09 Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.469854 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-2drbk"] Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.470382 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.470592 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pdmpf\" (UniqueName: \"kubernetes.io/projected/e0da22bc-148f-48ea-98b0-0e316a52b1a1-kube-api-access-pdmpf\") pod \"catalog-operator-68c6474976-fm4wv\" (UID: \"e0da22bc-148f-48ea-98b0-0e316a52b1a1\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fm4wv" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.470619 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b8c401c4-a06d-45b8-87ef-d76236d85453-srv-cert\") pod \"olm-operator-6b444d44fb-dfh8l\" (UID: \"b8c401c4-a06d-45b8-87ef-d76236d85453\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dfh8l" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.470655 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6dqdv\" (UniqueName: \"kubernetes.io/projected/e5603c68-b940-4ede-9433-500080f5eca9-kube-api-access-6dqdv\") pod \"machine-config-server-2c9tf\" (UID: \"e5603c68-b940-4ede-9433-500080f5eca9\") " pod="openshift-machine-config-operator/machine-config-server-2c9tf" Dec 05 12:27:47 crc kubenswrapper[4784]: 
I1205 12:27:47.470676 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f61c795f-92e7-4eb0-8cf0-dcd934749576-trusted-ca\") pod \"console-operator-58897d9998-fvbbf\" (UID: \"f61c795f-92e7-4eb0-8cf0-dcd934749576\") " pod="openshift-console-operator/console-operator-58897d9998-fvbbf" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.470708 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/cf617d33-a828-4556-b92f-90cb28dd8d8c-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-599vr\" (UID: \"cf617d33-a828-4556-b92f-90cb28dd8d8c\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-599vr" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.470731 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8x66t\" (UniqueName: \"kubernetes.io/projected/095096dd-0a14-4993-80fb-c332ae212107-kube-api-access-8x66t\") pod \"router-default-5444994796-wpt8s\" (UID: \"095096dd-0a14-4993-80fb-c332ae212107\") " pod="openshift-ingress/router-default-5444994796-wpt8s" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.470754 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4lqtj\" (UniqueName: \"kubernetes.io/projected/2a6d9550-f59e-4abd-adcd-6514e8c143bc-kube-api-access-4lqtj\") pod \"ingress-operator-5b745b69d9-c9str\" (UID: \"2a6d9550-f59e-4abd-adcd-6514e8c143bc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-c9str" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.470802 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/095096dd-0a14-4993-80fb-c332ae212107-metrics-certs\") pod \"router-default-5444994796-wpt8s\" (UID: \"095096dd-0a14-4993-80fb-c332ae212107\") " pod="openshift-ingress/router-default-5444994796-wpt8s" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.470839 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jfv4q\" (UniqueName: \"kubernetes.io/projected/ec9447bc-e76f-4943-9f80-f4d121ff1322-kube-api-access-jfv4q\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.470861 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/99e59e8a-dbaf-4ae4-82d5-505cde15ff2b-signing-cabundle\") pod \"service-ca-9c57cc56f-z2p7n\" (UID: \"99e59e8a-dbaf-4ae4-82d5-505cde15ff2b\") " pod="openshift-service-ca/service-ca-9c57cc56f-z2p7n" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.470882 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/3de2abbd-45d1-489a-8105-448180433f7d-plugins-dir\") pod \"csi-hostpathplugin-tbmvb\" (UID: \"3de2abbd-45d1-489a-8105-448180433f7d\") " pod="hostpath-provisioner/csi-hostpathplugin-tbmvb" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.470903 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" 
(UniqueName: \"kubernetes.io/secret/232f2d6d-4234-4868-9ede-a37034cc5d5b-apiservice-cert\") pod \"packageserver-d55dfcdfc-wsxgh\" (UID: \"232f2d6d-4234-4868-9ede-a37034cc5d5b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsxgh" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.470927 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fch8c\" (UniqueName: \"kubernetes.io/projected/298cdb81-831b-4653-9491-b9215b59b87d-kube-api-access-fch8c\") pod \"ingress-canary-f2nct\" (UID: \"298cdb81-831b-4653-9491-b9215b59b87d\") " pod="openshift-ingress-canary/ingress-canary-f2nct" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.470948 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a7ecea0-b46b-46dd-b210-56ffe3104e3e-config\") pod \"kube-controller-manager-operator-78b949d7b-hb74q\" (UID: \"3a7ecea0-b46b-46dd-b210-56ffe3104e3e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hb74q" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.470969 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/e350bf27-d60f-4f5f-9bc0-460e997fed0c-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-txh4x\" (UID: \"e350bf27-d60f-4f5f-9bc0-460e997fed0c\") " pod="openshift-marketplace/marketplace-operator-79b997595-txh4x" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.471019 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/c06c3bf6-dd05-4b6a-a64a-5b92a1082c95-proxy-tls\") pod \"machine-config-operator-74547568cd-q7vnr\" (UID: \"c06c3bf6-dd05-4b6a-a64a-5b92a1082c95\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-q7vnr" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.471043 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9h9p6\" (UniqueName: \"kubernetes.io/projected/63a61829-2074-4b44-9297-6dcf6236af1a-kube-api-access-9h9p6\") pod \"kube-storage-version-migrator-operator-b67b599dd-rbpdj\" (UID: \"63a61829-2074-4b44-9297-6dcf6236af1a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rbpdj" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.471067 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c0a51b7e-45fa-4c8b-9700-0872a5f49527-config-volume\") pod \"collect-profiles-29415615-7t8s6\" (UID: \"c0a51b7e-45fa-4c8b-9700-0872a5f49527\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-7t8s6" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.471090 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/3de2abbd-45d1-489a-8105-448180433f7d-socket-dir\") pod \"csi-hostpathplugin-tbmvb\" (UID: \"3de2abbd-45d1-489a-8105-448180433f7d\") " pod="hostpath-provisioner/csi-hostpathplugin-tbmvb" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.471135 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0d1f50e5-5ec7-4b19-ab3f-119001d4695c-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-ptn7m\" (UID: \"0d1f50e5-5ec7-4b19-ab3f-119001d4695c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ptn7m" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.471172 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ec9447bc-e76f-4943-9f80-f4d121ff1322-trusted-ca\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:47 crc kubenswrapper[4784]: E1205 12:27:47.471799 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:27:47.971775669 +0000 UTC m=+147.391842484 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.473598 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/e0da22bc-148f-48ea-98b0-0e316a52b1a1-srv-cert\") pod \"catalog-operator-68c6474976-fm4wv\" (UID: \"e0da22bc-148f-48ea-98b0-0e316a52b1a1\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fm4wv" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.473640 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8nz64\" (UniqueName: \"kubernetes.io/projected/a23cdf56-ddf2-4914-93d8-18b0d5cdd52f-kube-api-access-8nz64\") pod \"migrator-59844c95c7-wrh5b\" (UID: \"a23cdf56-ddf2-4914-93d8-18b0d5cdd52f\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-wrh5b" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.474352 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f61c795f-92e7-4eb0-8cf0-dcd934749576-trusted-ca\") pod \"console-operator-58897d9998-fvbbf\" (UID: \"f61c795f-92e7-4eb0-8cf0-dcd934749576\") " pod="openshift-console-operator/console-operator-58897d9998-fvbbf" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.474770 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ec9447bc-e76f-4943-9f80-f4d121ff1322-trusted-ca\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.478316 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/63a61829-2074-4b44-9297-6dcf6236af1a-serving-cert\") pod 
\"kube-storage-version-migrator-operator-b67b599dd-rbpdj\" (UID: \"63a61829-2074-4b44-9297-6dcf6236af1a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rbpdj" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.478390 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f61c795f-92e7-4eb0-8cf0-dcd934749576-serving-cert\") pod \"console-operator-58897d9998-fvbbf\" (UID: \"f61c795f-92e7-4eb0-8cf0-dcd934749576\") " pod="openshift-console-operator/console-operator-58897d9998-fvbbf" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.478412 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/3de2abbd-45d1-489a-8105-448180433f7d-mountpoint-dir\") pod \"csi-hostpathplugin-tbmvb\" (UID: \"3de2abbd-45d1-489a-8105-448180433f7d\") " pod="hostpath-provisioner/csi-hostpathplugin-tbmvb" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.478428 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/095096dd-0a14-4993-80fb-c332ae212107-service-ca-bundle\") pod \"router-default-5444994796-wpt8s\" (UID: \"095096dd-0a14-4993-80fb-c332ae212107\") " pod="openshift-ingress/router-default-5444994796-wpt8s" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.478444 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/298cdb81-831b-4653-9491-b9215b59b87d-cert\") pod \"ingress-canary-f2nct\" (UID: \"298cdb81-831b-4653-9491-b9215b59b87d\") " pod="openshift-ingress-canary/ingress-canary-f2nct" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.478460 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f61c795f-92e7-4eb0-8cf0-dcd934749576-config\") pod \"console-operator-58897d9998-fvbbf\" (UID: \"f61c795f-92e7-4eb0-8cf0-dcd934749576\") " pod="openshift-console-operator/console-operator-58897d9998-fvbbf" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.478475 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/7bec0da8-c8ae-453e-95e1-060b8fb71de1-metrics-tls\") pod \"dns-default-l8jkp\" (UID: \"7bec0da8-c8ae-453e-95e1-060b8fb71de1\") " pod="openshift-dns/dns-default-l8jkp" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.478490 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d1f50e5-5ec7-4b19-ab3f-119001d4695c-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-ptn7m\" (UID: \"0d1f50e5-5ec7-4b19-ab3f-119001d4695c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ptn7m" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.478521 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/48962005-cd5d-4815-a221-ba37a8037b25-config\") pod \"kube-apiserver-operator-766d6c64bb-2hkzk\" (UID: \"48962005-cd5d-4815-a221-ba37a8037b25\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2hkzk" Dec 05 12:27:47 crc 
kubenswrapper[4784]: I1205 12:27:47.478537 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/095096dd-0a14-4993-80fb-c332ae212107-stats-auth\") pod \"router-default-5444994796-wpt8s\" (UID: \"095096dd-0a14-4993-80fb-c332ae212107\") " pod="openshift-ingress/router-default-5444994796-wpt8s" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.478553 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/e0da22bc-148f-48ea-98b0-0e316a52b1a1-profile-collector-cert\") pod \"catalog-operator-68c6474976-fm4wv\" (UID: \"e0da22bc-148f-48ea-98b0-0e316a52b1a1\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fm4wv" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.478580 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gngwj\" (UniqueName: \"kubernetes.io/projected/e5f2aa15-edae-4c3f-af17-1fbb714f3f53-kube-api-access-gngwj\") pod \"multus-admission-controller-857f4d67dd-df92s\" (UID: \"e5f2aa15-edae-4c3f-af17-1fbb714f3f53\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-df92s" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.478596 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tpbnk\" (UniqueName: \"kubernetes.io/projected/3de2abbd-45d1-489a-8105-448180433f7d-kube-api-access-tpbnk\") pod \"csi-hostpathplugin-tbmvb\" (UID: \"3de2abbd-45d1-489a-8105-448180433f7d\") " pod="hostpath-provisioner/csi-hostpathplugin-tbmvb" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.478618 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/3de2abbd-45d1-489a-8105-448180433f7d-registration-dir\") pod \"csi-hostpathplugin-tbmvb\" (UID: \"3de2abbd-45d1-489a-8105-448180433f7d\") " pod="hostpath-provisioner/csi-hostpathplugin-tbmvb" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.478634 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v8bzh\" (UniqueName: \"kubernetes.io/projected/e350bf27-d60f-4f5f-9bc0-460e997fed0c-kube-api-access-v8bzh\") pod \"marketplace-operator-79b997595-txh4x\" (UID: \"e350bf27-d60f-4f5f-9bc0-460e997fed0c\") " pod="openshift-marketplace/marketplace-operator-79b997595-txh4x" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.478670 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/88e9df39-e23d-4d44-9ca0-148deb5d9809-serving-cert\") pod \"openshift-config-operator-7777fb866f-wfsr7\" (UID: \"88e9df39-e23d-4d44-9ca0-148deb5d9809\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wfsr7" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.478689 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2a6d9550-f59e-4abd-adcd-6514e8c143bc-bound-sa-token\") pod \"ingress-operator-5b745b69d9-c9str\" (UID: \"2a6d9550-f59e-4abd-adcd-6514e8c143bc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-c9str" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.478705 4784 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-r7fvv\" (UniqueName: \"kubernetes.io/projected/f61c795f-92e7-4eb0-8cf0-dcd934749576-kube-api-access-r7fvv\") pod \"console-operator-58897d9998-fvbbf\" (UID: \"f61c795f-92e7-4eb0-8cf0-dcd934749576\") " pod="openshift-console-operator/console-operator-58897d9998-fvbbf" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.478722 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/095096dd-0a14-4993-80fb-c332ae212107-default-certificate\") pod \"router-default-5444994796-wpt8s\" (UID: \"095096dd-0a14-4993-80fb-c332ae212107\") " pod="openshift-ingress/router-default-5444994796-wpt8s" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.478738 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/3de2abbd-45d1-489a-8105-448180433f7d-csi-data-dir\") pod \"csi-hostpathplugin-tbmvb\" (UID: \"3de2abbd-45d1-489a-8105-448180433f7d\") " pod="hostpath-provisioner/csi-hostpathplugin-tbmvb" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.478768 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c0a51b7e-45fa-4c8b-9700-0872a5f49527-secret-volume\") pod \"collect-profiles-29415615-7t8s6\" (UID: \"c0a51b7e-45fa-4c8b-9700-0872a5f49527\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-7t8s6" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.478782 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/e5603c68-b940-4ede-9433-500080f5eca9-certs\") pod \"machine-config-server-2c9tf\" (UID: \"e5603c68-b940-4ede-9433-500080f5eca9\") " pod="openshift-machine-config-operator/machine-config-server-2c9tf" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.478806 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/17e06e1c-4128-4e70-b4a7-b83685a7166a-serving-cert\") pod \"service-ca-operator-777779d784-bzfdn\" (UID: \"17e06e1c-4128-4e70-b4a7-b83685a7166a\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-bzfdn" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.478835 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e350bf27-d60f-4f5f-9bc0-460e997fed0c-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-txh4x\" (UID: \"e350bf27-d60f-4f5f-9bc0-460e997fed0c\") " pod="openshift-marketplace/marketplace-operator-79b997595-txh4x" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.478866 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0d1f50e5-5ec7-4b19-ab3f-119001d4695c-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-ptn7m\" (UID: \"0d1f50e5-5ec7-4b19-ab3f-119001d4695c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ptn7m" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.478901 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-s9jsc\" (UniqueName: \"kubernetes.io/projected/88e9df39-e23d-4d44-9ca0-148deb5d9809-kube-api-access-s9jsc\") pod \"openshift-config-operator-7777fb866f-wfsr7\" (UID: \"88e9df39-e23d-4d44-9ca0-148deb5d9809\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wfsr7" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.478917 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63a61829-2074-4b44-9297-6dcf6236af1a-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-rbpdj\" (UID: \"63a61829-2074-4b44-9297-6dcf6236af1a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rbpdj" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.478934 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ec9447bc-e76f-4943-9f80-f4d121ff1322-registry-certificates\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.478951 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2a6d9550-f59e-4abd-adcd-6514e8c143bc-trusted-ca\") pod \"ingress-operator-5b745b69d9-c9str\" (UID: \"2a6d9550-f59e-4abd-adcd-6514e8c143bc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-c9str" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.478982 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4qnvg\" (UniqueName: \"kubernetes.io/projected/7bec0da8-c8ae-453e-95e1-060b8fb71de1-kube-api-access-4qnvg\") pod \"dns-default-l8jkp\" (UID: \"7bec0da8-c8ae-453e-95e1-060b8fb71de1\") " pod="openshift-dns/dns-default-l8jkp" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.479026 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17e06e1c-4128-4e70-b4a7-b83685a7166a-config\") pod \"service-ca-operator-777779d784-bzfdn\" (UID: \"17e06e1c-4128-4e70-b4a7-b83685a7166a\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-bzfdn" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.479041 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f7pdl\" (UniqueName: \"kubernetes.io/projected/3edbf65e-d134-4f22-9c92-0f51e9350f05-kube-api-access-f7pdl\") pod \"package-server-manager-789f6589d5-qbc5m\" (UID: \"3edbf65e-d134-4f22-9c92-0f51e9350f05\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qbc5m" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.479061 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/48962005-cd5d-4815-a221-ba37a8037b25-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-2hkzk\" (UID: \"48962005-cd5d-4815-a221-ba37a8037b25\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2hkzk" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.479101 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"proxy-tls\" (UniqueName: \"kubernetes.io/secret/cf617d33-a828-4556-b92f-90cb28dd8d8c-proxy-tls\") pod \"machine-config-controller-84d6567774-599vr\" (UID: \"cf617d33-a828-4556-b92f-90cb28dd8d8c\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-599vr" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.479125 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fdw24\" (UniqueName: \"kubernetes.io/projected/99e59e8a-dbaf-4ae4-82d5-505cde15ff2b-kube-api-access-fdw24\") pod \"service-ca-9c57cc56f-z2p7n\" (UID: \"99e59e8a-dbaf-4ae4-82d5-505cde15ff2b\") " pod="openshift-service-ca/service-ca-9c57cc56f-z2p7n" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.479149 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3a7ecea0-b46b-46dd-b210-56ffe3104e3e-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-hb74q\" (UID: \"3a7ecea0-b46b-46dd-b210-56ffe3104e3e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hb74q" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.479180 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ec9447bc-e76f-4943-9f80-f4d121ff1322-registry-tls\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.479222 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b577t\" (UniqueName: \"kubernetes.io/projected/232f2d6d-4234-4868-9ede-a37034cc5d5b-kube-api-access-b577t\") pod \"packageserver-d55dfcdfc-wsxgh\" (UID: \"232f2d6d-4234-4868-9ede-a37034cc5d5b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsxgh" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.479247 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3a7ecea0-b46b-46dd-b210-56ffe3104e3e-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-hb74q\" (UID: \"3a7ecea0-b46b-46dd-b210-56ffe3104e3e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hb74q" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.479264 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/99e59e8a-dbaf-4ae4-82d5-505cde15ff2b-signing-key\") pod \"service-ca-9c57cc56f-z2p7n\" (UID: \"99e59e8a-dbaf-4ae4-82d5-505cde15ff2b\") " pod="openshift-service-ca/service-ca-9c57cc56f-z2p7n" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.479291 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/48962005-cd5d-4815-a221-ba37a8037b25-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-2hkzk\" (UID: \"48962005-cd5d-4815-a221-ba37a8037b25\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2hkzk" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.479309 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e5f2aa15-edae-4c3f-af17-1fbb714f3f53-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-df92s\" (UID: \"e5f2aa15-edae-4c3f-af17-1fbb714f3f53\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-df92s" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.479327 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/4d076d7f-77aa-4e21-9189-80c39bc6147d-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-shp28\" (UID: \"4d076d7f-77aa-4e21-9189-80c39bc6147d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-shp28" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.479354 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ec9447bc-e76f-4943-9f80-f4d121ff1322-ca-trust-extracted\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.479395 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvm5r\" (UniqueName: \"kubernetes.io/projected/17e06e1c-4128-4e70-b4a7-b83685a7166a-kube-api-access-jvm5r\") pod \"service-ca-operator-777779d784-bzfdn\" (UID: \"17e06e1c-4128-4e70-b4a7-b83685a7166a\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-bzfdn" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.479424 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5vp94\" (UniqueName: \"kubernetes.io/projected/cf617d33-a828-4556-b92f-90cb28dd8d8c-kube-api-access-5vp94\") pod \"machine-config-controller-84d6567774-599vr\" (UID: \"cf617d33-a828-4556-b92f-90cb28dd8d8c\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-599vr" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.479442 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ec9447bc-e76f-4943-9f80-f4d121ff1322-bound-sa-token\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.479471 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b8c401c4-a06d-45b8-87ef-d76236d85453-profile-collector-cert\") pod \"olm-operator-6b444d44fb-dfh8l\" (UID: \"b8c401c4-a06d-45b8-87ef-d76236d85453\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dfh8l" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.484401 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2a6d9550-f59e-4abd-adcd-6514e8c143bc-trusted-ca\") pod \"ingress-operator-5b745b69d9-c9str\" (UID: \"2a6d9550-f59e-4abd-adcd-6514e8c143bc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-c9str" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.484875 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ec9447bc-e76f-4943-9f80-f4d121ff1322-registry-certificates\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.485107 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ec9447bc-e76f-4943-9f80-f4d121ff1322-ca-trust-extracted\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.485816 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f61c795f-92e7-4eb0-8cf0-dcd934749576-config\") pod \"console-operator-58897d9998-fvbbf\" (UID: \"f61c795f-92e7-4eb0-8cf0-dcd934749576\") " pod="openshift-console-operator/console-operator-58897d9998-fvbbf" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.486340 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f61c795f-92e7-4eb0-8cf0-dcd934749576-serving-cert\") pod \"console-operator-58897d9998-fvbbf\" (UID: \"f61c795f-92e7-4eb0-8cf0-dcd934749576\") " pod="openshift-console-operator/console-operator-58897d9998-fvbbf" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.486774 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ec9447bc-e76f-4943-9f80-f4d121ff1322-registry-tls\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.488343 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/232f2d6d-4234-4868-9ede-a37034cc5d5b-tmpfs\") pod \"packageserver-d55dfcdfc-wsxgh\" (UID: \"232f2d6d-4234-4868-9ede-a37034cc5d5b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsxgh" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.488392 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-84pmk\" (UniqueName: \"kubernetes.io/projected/b8c401c4-a06d-45b8-87ef-d76236d85453-kube-api-access-84pmk\") pod \"olm-operator-6b444d44fb-dfh8l\" (UID: \"b8c401c4-a06d-45b8-87ef-d76236d85453\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dfh8l" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.488413 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c06c3bf6-dd05-4b6a-a64a-5b92a1082c95-auth-proxy-config\") pod \"machine-config-operator-74547568cd-q7vnr\" (UID: \"c06c3bf6-dd05-4b6a-a64a-5b92a1082c95\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-q7vnr" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.488439 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/2a6d9550-f59e-4abd-adcd-6514e8c143bc-metrics-tls\") pod \"ingress-operator-5b745b69d9-c9str\" (UID: \"2a6d9550-f59e-4abd-adcd-6514e8c143bc\") " 
pod="openshift-ingress-operator/ingress-operator-5b745b69d9-c9str" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.488458 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/88e9df39-e23d-4d44-9ca0-148deb5d9809-available-featuregates\") pod \"openshift-config-operator-7777fb866f-wfsr7\" (UID: \"88e9df39-e23d-4d44-9ca0-148deb5d9809\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wfsr7" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.488485 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/e5603c68-b940-4ede-9433-500080f5eca9-node-bootstrap-token\") pod \"machine-config-server-2c9tf\" (UID: \"e5603c68-b940-4ede-9433-500080f5eca9\") " pod="openshift-machine-config-operator/machine-config-server-2c9tf" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.488514 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ec9447bc-e76f-4943-9f80-f4d121ff1322-installation-pull-secrets\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.488529 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqb85\" (UniqueName: \"kubernetes.io/projected/c0a51b7e-45fa-4c8b-9700-0872a5f49527-kube-api-access-fqb85\") pod \"collect-profiles-29415615-7t8s6\" (UID: \"c0a51b7e-45fa-4c8b-9700-0872a5f49527\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-7t8s6" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.488551 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7bec0da8-c8ae-453e-95e1-060b8fb71de1-config-volume\") pod \"dns-default-l8jkp\" (UID: \"7bec0da8-c8ae-453e-95e1-060b8fb71de1\") " pod="openshift-dns/dns-default-l8jkp" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.488783 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3edbf65e-d134-4f22-9c92-0f51e9350f05-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-qbc5m\" (UID: \"3edbf65e-d134-4f22-9c92-0f51e9350f05\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qbc5m" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.488880 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/232f2d6d-4234-4868-9ede-a37034cc5d5b-webhook-cert\") pod \"packageserver-d55dfcdfc-wsxgh\" (UID: \"232f2d6d-4234-4868-9ede-a37034cc5d5b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsxgh" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.488914 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/c06c3bf6-dd05-4b6a-a64a-5b92a1082c95-images\") pod \"machine-config-operator-74547568cd-q7vnr\" (UID: \"c06c3bf6-dd05-4b6a-a64a-5b92a1082c95\") " 
pod="openshift-machine-config-operator/machine-config-operator-74547568cd-q7vnr" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.488929 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b5khb\" (UniqueName: \"kubernetes.io/projected/c06c3bf6-dd05-4b6a-a64a-5b92a1082c95-kube-api-access-b5khb\") pod \"machine-config-operator-74547568cd-q7vnr\" (UID: \"c06c3bf6-dd05-4b6a-a64a-5b92a1082c95\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-q7vnr" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.488951 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m9hds\" (UniqueName: \"kubernetes.io/projected/4d076d7f-77aa-4e21-9189-80c39bc6147d-kube-api-access-m9hds\") pod \"control-plane-machine-set-operator-78cbb6b69f-shp28\" (UID: \"4d076d7f-77aa-4e21-9189-80c39bc6147d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-shp28" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.492647 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ec9447bc-e76f-4943-9f80-f4d121ff1322-installation-pull-secrets\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.495817 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jfv4q\" (UniqueName: \"kubernetes.io/projected/ec9447bc-e76f-4943-9f80-f4d121ff1322-kube-api-access-jfv4q\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.496609 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/2a6d9550-f59e-4abd-adcd-6514e8c143bc-metrics-tls\") pod \"ingress-operator-5b745b69d9-c9str\" (UID: \"2a6d9550-f59e-4abd-adcd-6514e8c143bc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-c9str" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.526174 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rmq4p"] Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.536100 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4lqtj\" (UniqueName: \"kubernetes.io/projected/2a6d9550-f59e-4abd-adcd-6514e8c143bc-kube-api-access-4lqtj\") pod \"ingress-operator-5b745b69d9-c9str\" (UID: \"2a6d9550-f59e-4abd-adcd-6514e8c143bc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-c9str" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.569985 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ec9447bc-e76f-4943-9f80-f4d121ff1322-bound-sa-token\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.571929 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-g67wl"] Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.582532 4784 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r7fvv\" (UniqueName: \"kubernetes.io/projected/f61c795f-92e7-4eb0-8cf0-dcd934749576-kube-api-access-r7fvv\") pod \"console-operator-58897d9998-fvbbf\" (UID: \"f61c795f-92e7-4eb0-8cf0-dcd934749576\") " pod="openshift-console-operator/console-operator-58897d9998-fvbbf" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.592395 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/095096dd-0a14-4993-80fb-c332ae212107-default-certificate\") pod \"router-default-5444994796-wpt8s\" (UID: \"095096dd-0a14-4993-80fb-c332ae212107\") " pod="openshift-ingress/router-default-5444994796-wpt8s" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.592463 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/88e9df39-e23d-4d44-9ca0-148deb5d9809-serving-cert\") pod \"openshift-config-operator-7777fb866f-wfsr7\" (UID: \"88e9df39-e23d-4d44-9ca0-148deb5d9809\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wfsr7" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.592485 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/3de2abbd-45d1-489a-8105-448180433f7d-csi-data-dir\") pod \"csi-hostpathplugin-tbmvb\" (UID: \"3de2abbd-45d1-489a-8105-448180433f7d\") " pod="hostpath-provisioner/csi-hostpathplugin-tbmvb" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.592522 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c0a51b7e-45fa-4c8b-9700-0872a5f49527-secret-volume\") pod \"collect-profiles-29415615-7t8s6\" (UID: \"c0a51b7e-45fa-4c8b-9700-0872a5f49527\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-7t8s6" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.592542 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/e5603c68-b940-4ede-9433-500080f5eca9-certs\") pod \"machine-config-server-2c9tf\" (UID: \"e5603c68-b940-4ede-9433-500080f5eca9\") " pod="openshift-machine-config-operator/machine-config-server-2c9tf" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.592567 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/17e06e1c-4128-4e70-b4a7-b83685a7166a-serving-cert\") pod \"service-ca-operator-777779d784-bzfdn\" (UID: \"17e06e1c-4128-4e70-b4a7-b83685a7166a\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-bzfdn" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.592603 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e350bf27-d60f-4f5f-9bc0-460e997fed0c-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-txh4x\" (UID: \"e350bf27-d60f-4f5f-9bc0-460e997fed0c\") " pod="openshift-marketplace/marketplace-operator-79b997595-txh4x" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.592627 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0d1f50e5-5ec7-4b19-ab3f-119001d4695c-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-ptn7m\" (UID: 
\"0d1f50e5-5ec7-4b19-ab3f-119001d4695c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ptn7m" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.592651 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s9jsc\" (UniqueName: \"kubernetes.io/projected/88e9df39-e23d-4d44-9ca0-148deb5d9809-kube-api-access-s9jsc\") pod \"openshift-config-operator-7777fb866f-wfsr7\" (UID: \"88e9df39-e23d-4d44-9ca0-148deb5d9809\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wfsr7" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.592702 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63a61829-2074-4b44-9297-6dcf6236af1a-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-rbpdj\" (UID: \"63a61829-2074-4b44-9297-6dcf6236af1a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rbpdj" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.592766 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4qnvg\" (UniqueName: \"kubernetes.io/projected/7bec0da8-c8ae-453e-95e1-060b8fb71de1-kube-api-access-4qnvg\") pod \"dns-default-l8jkp\" (UID: \"7bec0da8-c8ae-453e-95e1-060b8fb71de1\") " pod="openshift-dns/dns-default-l8jkp" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.592792 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f7pdl\" (UniqueName: \"kubernetes.io/projected/3edbf65e-d134-4f22-9c92-0f51e9350f05-kube-api-access-f7pdl\") pod \"package-server-manager-789f6589d5-qbc5m\" (UID: \"3edbf65e-d134-4f22-9c92-0f51e9350f05\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qbc5m" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.592815 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17e06e1c-4128-4e70-b4a7-b83685a7166a-config\") pod \"service-ca-operator-777779d784-bzfdn\" (UID: \"17e06e1c-4128-4e70-b4a7-b83685a7166a\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-bzfdn" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.592859 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/48962005-cd5d-4815-a221-ba37a8037b25-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-2hkzk\" (UID: \"48962005-cd5d-4815-a221-ba37a8037b25\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2hkzk" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.592882 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fdw24\" (UniqueName: \"kubernetes.io/projected/99e59e8a-dbaf-4ae4-82d5-505cde15ff2b-kube-api-access-fdw24\") pod \"service-ca-9c57cc56f-z2p7n\" (UID: \"99e59e8a-dbaf-4ae4-82d5-505cde15ff2b\") " pod="openshift-service-ca/service-ca-9c57cc56f-z2p7n" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.592926 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/cf617d33-a828-4556-b92f-90cb28dd8d8c-proxy-tls\") pod \"machine-config-controller-84d6567774-599vr\" (UID: \"cf617d33-a828-4556-b92f-90cb28dd8d8c\") " 
pod="openshift-machine-config-operator/machine-config-controller-84d6567774-599vr" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.592945 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3a7ecea0-b46b-46dd-b210-56ffe3104e3e-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-hb74q\" (UID: \"3a7ecea0-b46b-46dd-b210-56ffe3104e3e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hb74q" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.592970 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b577t\" (UniqueName: \"kubernetes.io/projected/232f2d6d-4234-4868-9ede-a37034cc5d5b-kube-api-access-b577t\") pod \"packageserver-d55dfcdfc-wsxgh\" (UID: \"232f2d6d-4234-4868-9ede-a37034cc5d5b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsxgh" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.593018 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3a7ecea0-b46b-46dd-b210-56ffe3104e3e-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-hb74q\" (UID: \"3a7ecea0-b46b-46dd-b210-56ffe3104e3e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hb74q" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.593041 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/99e59e8a-dbaf-4ae4-82d5-505cde15ff2b-signing-key\") pod \"service-ca-9c57cc56f-z2p7n\" (UID: \"99e59e8a-dbaf-4ae4-82d5-505cde15ff2b\") " pod="openshift-service-ca/service-ca-9c57cc56f-z2p7n" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.593060 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/48962005-cd5d-4815-a221-ba37a8037b25-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-2hkzk\" (UID: \"48962005-cd5d-4815-a221-ba37a8037b25\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2hkzk" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.595216 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/4d076d7f-77aa-4e21-9189-80c39bc6147d-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-shp28\" (UID: \"4d076d7f-77aa-4e21-9189-80c39bc6147d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-shp28" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.595256 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvm5r\" (UniqueName: \"kubernetes.io/projected/17e06e1c-4128-4e70-b4a7-b83685a7166a-kube-api-access-jvm5r\") pod \"service-ca-operator-777779d784-bzfdn\" (UID: \"17e06e1c-4128-4e70-b4a7-b83685a7166a\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-bzfdn" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.595321 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e5f2aa15-edae-4c3f-af17-1fbb714f3f53-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-df92s\" (UID: \"e5f2aa15-edae-4c3f-af17-1fbb714f3f53\") " 
pod="openshift-multus/multus-admission-controller-857f4d67dd-df92s" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.595351 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5vp94\" (UniqueName: \"kubernetes.io/projected/cf617d33-a828-4556-b92f-90cb28dd8d8c-kube-api-access-5vp94\") pod \"machine-config-controller-84d6567774-599vr\" (UID: \"cf617d33-a828-4556-b92f-90cb28dd8d8c\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-599vr" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.595394 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b8c401c4-a06d-45b8-87ef-d76236d85453-profile-collector-cert\") pod \"olm-operator-6b444d44fb-dfh8l\" (UID: \"b8c401c4-a06d-45b8-87ef-d76236d85453\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dfh8l" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.595414 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/232f2d6d-4234-4868-9ede-a37034cc5d5b-tmpfs\") pod \"packageserver-d55dfcdfc-wsxgh\" (UID: \"232f2d6d-4234-4868-9ede-a37034cc5d5b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsxgh" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.595440 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c06c3bf6-dd05-4b6a-a64a-5b92a1082c95-auth-proxy-config\") pod \"machine-config-operator-74547568cd-q7vnr\" (UID: \"c06c3bf6-dd05-4b6a-a64a-5b92a1082c95\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-q7vnr" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.595480 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-84pmk\" (UniqueName: \"kubernetes.io/projected/b8c401c4-a06d-45b8-87ef-d76236d85453-kube-api-access-84pmk\") pod \"olm-operator-6b444d44fb-dfh8l\" (UID: \"b8c401c4-a06d-45b8-87ef-d76236d85453\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dfh8l" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.595508 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/e5603c68-b940-4ede-9433-500080f5eca9-node-bootstrap-token\") pod \"machine-config-server-2c9tf\" (UID: \"e5603c68-b940-4ede-9433-500080f5eca9\") " pod="openshift-machine-config-operator/machine-config-server-2c9tf" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.595546 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/88e9df39-e23d-4d44-9ca0-148deb5d9809-available-featuregates\") pod \"openshift-config-operator-7777fb866f-wfsr7\" (UID: \"88e9df39-e23d-4d44-9ca0-148deb5d9809\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wfsr7" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.595578 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7bec0da8-c8ae-453e-95e1-060b8fb71de1-config-volume\") pod \"dns-default-l8jkp\" (UID: \"7bec0da8-c8ae-453e-95e1-060b8fb71de1\") " pod="openshift-dns/dns-default-l8jkp" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.595598 4784 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3edbf65e-d134-4f22-9c92-0f51e9350f05-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-qbc5m\" (UID: \"3edbf65e-d134-4f22-9c92-0f51e9350f05\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qbc5m" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.595638 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqb85\" (UniqueName: \"kubernetes.io/projected/c0a51b7e-45fa-4c8b-9700-0872a5f49527-kube-api-access-fqb85\") pod \"collect-profiles-29415615-7t8s6\" (UID: \"c0a51b7e-45fa-4c8b-9700-0872a5f49527\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-7t8s6" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.595683 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/232f2d6d-4234-4868-9ede-a37034cc5d5b-webhook-cert\") pod \"packageserver-d55dfcdfc-wsxgh\" (UID: \"232f2d6d-4234-4868-9ede-a37034cc5d5b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsxgh" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.595722 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/c06c3bf6-dd05-4b6a-a64a-5b92a1082c95-images\") pod \"machine-config-operator-74547568cd-q7vnr\" (UID: \"c06c3bf6-dd05-4b6a-a64a-5b92a1082c95\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-q7vnr" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.595744 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b5khb\" (UniqueName: \"kubernetes.io/projected/c06c3bf6-dd05-4b6a-a64a-5b92a1082c95-kube-api-access-b5khb\") pod \"machine-config-operator-74547568cd-q7vnr\" (UID: \"c06c3bf6-dd05-4b6a-a64a-5b92a1082c95\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-q7vnr" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.595791 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m9hds\" (UniqueName: \"kubernetes.io/projected/4d076d7f-77aa-4e21-9189-80c39bc6147d-kube-api-access-m9hds\") pod \"control-plane-machine-set-operator-78cbb6b69f-shp28\" (UID: \"4d076d7f-77aa-4e21-9189-80c39bc6147d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-shp28" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.595817 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pdmpf\" (UniqueName: \"kubernetes.io/projected/e0da22bc-148f-48ea-98b0-0e316a52b1a1-kube-api-access-pdmpf\") pod \"catalog-operator-68c6474976-fm4wv\" (UID: \"e0da22bc-148f-48ea-98b0-0e316a52b1a1\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fm4wv" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.595841 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b8c401c4-a06d-45b8-87ef-d76236d85453-srv-cert\") pod \"olm-operator-6b444d44fb-dfh8l\" (UID: \"b8c401c4-a06d-45b8-87ef-d76236d85453\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dfh8l" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.595886 4784 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-6dqdv\" (UniqueName: \"kubernetes.io/projected/e5603c68-b940-4ede-9433-500080f5eca9-kube-api-access-6dqdv\") pod \"machine-config-server-2c9tf\" (UID: \"e5603c68-b940-4ede-9433-500080f5eca9\") " pod="openshift-machine-config-operator/machine-config-server-2c9tf" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.595915 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8x66t\" (UniqueName: \"kubernetes.io/projected/095096dd-0a14-4993-80fb-c332ae212107-kube-api-access-8x66t\") pod \"router-default-5444994796-wpt8s\" (UID: \"095096dd-0a14-4993-80fb-c332ae212107\") " pod="openshift-ingress/router-default-5444994796-wpt8s" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.595963 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/cf617d33-a828-4556-b92f-90cb28dd8d8c-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-599vr\" (UID: \"cf617d33-a828-4556-b92f-90cb28dd8d8c\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-599vr" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.596023 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/095096dd-0a14-4993-80fb-c332ae212107-metrics-certs\") pod \"router-default-5444994796-wpt8s\" (UID: \"095096dd-0a14-4993-80fb-c332ae212107\") " pod="openshift-ingress/router-default-5444994796-wpt8s" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.596053 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/99e59e8a-dbaf-4ae4-82d5-505cde15ff2b-signing-cabundle\") pod \"service-ca-9c57cc56f-z2p7n\" (UID: \"99e59e8a-dbaf-4ae4-82d5-505cde15ff2b\") " pod="openshift-service-ca/service-ca-9c57cc56f-z2p7n" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.596076 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/3de2abbd-45d1-489a-8105-448180433f7d-plugins-dir\") pod \"csi-hostpathplugin-tbmvb\" (UID: \"3de2abbd-45d1-489a-8105-448180433f7d\") " pod="hostpath-provisioner/csi-hostpathplugin-tbmvb" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.596116 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/232f2d6d-4234-4868-9ede-a37034cc5d5b-apiservice-cert\") pod \"packageserver-d55dfcdfc-wsxgh\" (UID: \"232f2d6d-4234-4868-9ede-a37034cc5d5b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsxgh" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.596145 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fch8c\" (UniqueName: \"kubernetes.io/projected/298cdb81-831b-4653-9491-b9215b59b87d-kube-api-access-fch8c\") pod \"ingress-canary-f2nct\" (UID: \"298cdb81-831b-4653-9491-b9215b59b87d\") " pod="openshift-ingress-canary/ingress-canary-f2nct" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.596181 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/e350bf27-d60f-4f5f-9bc0-460e997fed0c-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-txh4x\" (UID: 
\"e350bf27-d60f-4f5f-9bc0-460e997fed0c\") " pod="openshift-marketplace/marketplace-operator-79b997595-txh4x" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.596223 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a7ecea0-b46b-46dd-b210-56ffe3104e3e-config\") pod \"kube-controller-manager-operator-78b949d7b-hb74q\" (UID: \"3a7ecea0-b46b-46dd-b210-56ffe3104e3e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hb74q" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.596267 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9h9p6\" (UniqueName: \"kubernetes.io/projected/63a61829-2074-4b44-9297-6dcf6236af1a-kube-api-access-9h9p6\") pod \"kube-storage-version-migrator-operator-b67b599dd-rbpdj\" (UID: \"63a61829-2074-4b44-9297-6dcf6236af1a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rbpdj" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.596292 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/c06c3bf6-dd05-4b6a-a64a-5b92a1082c95-proxy-tls\") pod \"machine-config-operator-74547568cd-q7vnr\" (UID: \"c06c3bf6-dd05-4b6a-a64a-5b92a1082c95\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-q7vnr" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.596315 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/3de2abbd-45d1-489a-8105-448180433f7d-socket-dir\") pod \"csi-hostpathplugin-tbmvb\" (UID: \"3de2abbd-45d1-489a-8105-448180433f7d\") " pod="hostpath-provisioner/csi-hostpathplugin-tbmvb" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.596357 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c0a51b7e-45fa-4c8b-9700-0872a5f49527-config-volume\") pod \"collect-profiles-29415615-7t8s6\" (UID: \"c0a51b7e-45fa-4c8b-9700-0872a5f49527\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-7t8s6" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.596388 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.596431 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0d1f50e5-5ec7-4b19-ab3f-119001d4695c-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-ptn7m\" (UID: \"0d1f50e5-5ec7-4b19-ab3f-119001d4695c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ptn7m" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.596453 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/e0da22bc-148f-48ea-98b0-0e316a52b1a1-srv-cert\") pod \"catalog-operator-68c6474976-fm4wv\" (UID: \"e0da22bc-148f-48ea-98b0-0e316a52b1a1\") " 
pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fm4wv" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.596475 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8nz64\" (UniqueName: \"kubernetes.io/projected/a23cdf56-ddf2-4914-93d8-18b0d5cdd52f-kube-api-access-8nz64\") pod \"migrator-59844c95c7-wrh5b\" (UID: \"a23cdf56-ddf2-4914-93d8-18b0d5cdd52f\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-wrh5b" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.596524 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/63a61829-2074-4b44-9297-6dcf6236af1a-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-rbpdj\" (UID: \"63a61829-2074-4b44-9297-6dcf6236af1a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rbpdj" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.596557 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/3de2abbd-45d1-489a-8105-448180433f7d-mountpoint-dir\") pod \"csi-hostpathplugin-tbmvb\" (UID: \"3de2abbd-45d1-489a-8105-448180433f7d\") " pod="hostpath-provisioner/csi-hostpathplugin-tbmvb" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.596601 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/095096dd-0a14-4993-80fb-c332ae212107-service-ca-bundle\") pod \"router-default-5444994796-wpt8s\" (UID: \"095096dd-0a14-4993-80fb-c332ae212107\") " pod="openshift-ingress/router-default-5444994796-wpt8s" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.596623 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/298cdb81-831b-4653-9491-b9215b59b87d-cert\") pod \"ingress-canary-f2nct\" (UID: \"298cdb81-831b-4653-9491-b9215b59b87d\") " pod="openshift-ingress-canary/ingress-canary-f2nct" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.596641 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/7bec0da8-c8ae-453e-95e1-060b8fb71de1-metrics-tls\") pod \"dns-default-l8jkp\" (UID: \"7bec0da8-c8ae-453e-95e1-060b8fb71de1\") " pod="openshift-dns/dns-default-l8jkp" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.596696 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d1f50e5-5ec7-4b19-ab3f-119001d4695c-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-ptn7m\" (UID: \"0d1f50e5-5ec7-4b19-ab3f-119001d4695c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ptn7m" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.596725 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/48962005-cd5d-4815-a221-ba37a8037b25-config\") pod \"kube-apiserver-operator-766d6c64bb-2hkzk\" (UID: \"48962005-cd5d-4815-a221-ba37a8037b25\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2hkzk" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.596769 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: 
\"kubernetes.io/secret/095096dd-0a14-4993-80fb-c332ae212107-stats-auth\") pod \"router-default-5444994796-wpt8s\" (UID: \"095096dd-0a14-4993-80fb-c332ae212107\") " pod="openshift-ingress/router-default-5444994796-wpt8s" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.596793 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/e0da22bc-148f-48ea-98b0-0e316a52b1a1-profile-collector-cert\") pod \"catalog-operator-68c6474976-fm4wv\" (UID: \"e0da22bc-148f-48ea-98b0-0e316a52b1a1\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fm4wv" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.596835 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gngwj\" (UniqueName: \"kubernetes.io/projected/e5f2aa15-edae-4c3f-af17-1fbb714f3f53-kube-api-access-gngwj\") pod \"multus-admission-controller-857f4d67dd-df92s\" (UID: \"e5f2aa15-edae-4c3f-af17-1fbb714f3f53\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-df92s" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.596860 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tpbnk\" (UniqueName: \"kubernetes.io/projected/3de2abbd-45d1-489a-8105-448180433f7d-kube-api-access-tpbnk\") pod \"csi-hostpathplugin-tbmvb\" (UID: \"3de2abbd-45d1-489a-8105-448180433f7d\") " pod="hostpath-provisioner/csi-hostpathplugin-tbmvb" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.596865 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17e06e1c-4128-4e70-b4a7-b83685a7166a-config\") pod \"service-ca-operator-777779d784-bzfdn\" (UID: \"17e06e1c-4128-4e70-b4a7-b83685a7166a\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-bzfdn" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.596890 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/3de2abbd-45d1-489a-8105-448180433f7d-registration-dir\") pod \"csi-hostpathplugin-tbmvb\" (UID: \"3de2abbd-45d1-489a-8105-448180433f7d\") " pod="hostpath-provisioner/csi-hostpathplugin-tbmvb" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.596938 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v8bzh\" (UniqueName: \"kubernetes.io/projected/e350bf27-d60f-4f5f-9bc0-460e997fed0c-kube-api-access-v8bzh\") pod \"marketplace-operator-79b997595-txh4x\" (UID: \"e350bf27-d60f-4f5f-9bc0-460e997fed0c\") " pod="openshift-marketplace/marketplace-operator-79b997595-txh4x" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.597919 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63a61829-2074-4b44-9297-6dcf6236af1a-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-rbpdj\" (UID: \"63a61829-2074-4b44-9297-6dcf6236af1a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rbpdj" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.599537 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e350bf27-d60f-4f5f-9bc0-460e997fed0c-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-txh4x\" (UID: \"e350bf27-d60f-4f5f-9bc0-460e997fed0c\") " 
pod="openshift-marketplace/marketplace-operator-79b997595-txh4x" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.599863 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/3de2abbd-45d1-489a-8105-448180433f7d-plugins-dir\") pod \"csi-hostpathplugin-tbmvb\" (UID: \"3de2abbd-45d1-489a-8105-448180433f7d\") " pod="hostpath-provisioner/csi-hostpathplugin-tbmvb" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.593066 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/3de2abbd-45d1-489a-8105-448180433f7d-csi-data-dir\") pod \"csi-hostpathplugin-tbmvb\" (UID: \"3de2abbd-45d1-489a-8105-448180433f7d\") " pod="hostpath-provisioner/csi-hostpathplugin-tbmvb" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.601701 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/3de2abbd-45d1-489a-8105-448180433f7d-mountpoint-dir\") pod \"csi-hostpathplugin-tbmvb\" (UID: \"3de2abbd-45d1-489a-8105-448180433f7d\") " pod="hostpath-provisioner/csi-hostpathplugin-tbmvb" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.603041 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/095096dd-0a14-4993-80fb-c332ae212107-service-ca-bundle\") pod \"router-default-5444994796-wpt8s\" (UID: \"095096dd-0a14-4993-80fb-c332ae212107\") " pod="openshift-ingress/router-default-5444994796-wpt8s" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.607034 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/095096dd-0a14-4993-80fb-c332ae212107-default-certificate\") pod \"router-default-5444994796-wpt8s\" (UID: \"095096dd-0a14-4993-80fb-c332ae212107\") " pod="openshift-ingress/router-default-5444994796-wpt8s" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.607053 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/3de2abbd-45d1-489a-8105-448180433f7d-registration-dir\") pod \"csi-hostpathplugin-tbmvb\" (UID: \"3de2abbd-45d1-489a-8105-448180433f7d\") " pod="hostpath-provisioner/csi-hostpathplugin-tbmvb" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.607773 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d1f50e5-5ec7-4b19-ab3f-119001d4695c-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-ptn7m\" (UID: \"0d1f50e5-5ec7-4b19-ab3f-119001d4695c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ptn7m" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.607847 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0d1f50e5-5ec7-4b19-ab3f-119001d4695c-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-ptn7m\" (UID: \"0d1f50e5-5ec7-4b19-ab3f-119001d4695c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ptn7m" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.608374 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/48962005-cd5d-4815-a221-ba37a8037b25-config\") pod \"kube-apiserver-operator-766d6c64bb-2hkzk\" 
(UID: \"48962005-cd5d-4815-a221-ba37a8037b25\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2hkzk" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.608430 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a7ecea0-b46b-46dd-b210-56ffe3104e3e-config\") pod \"kube-controller-manager-operator-78b949d7b-hb74q\" (UID: \"3a7ecea0-b46b-46dd-b210-56ffe3104e3e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hb74q" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.608888 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/c06c3bf6-dd05-4b6a-a64a-5b92a1082c95-images\") pod \"machine-config-operator-74547568cd-q7vnr\" (UID: \"c06c3bf6-dd05-4b6a-a64a-5b92a1082c95\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-q7vnr" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.609000 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c06c3bf6-dd05-4b6a-a64a-5b92a1082c95-auth-proxy-config\") pod \"machine-config-operator-74547568cd-q7vnr\" (UID: \"c06c3bf6-dd05-4b6a-a64a-5b92a1082c95\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-q7vnr" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.609205 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/cf617d33-a828-4556-b92f-90cb28dd8d8c-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-599vr\" (UID: \"cf617d33-a828-4556-b92f-90cb28dd8d8c\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-599vr" Dec 05 12:27:47 crc kubenswrapper[4784]: E1205 12:27:47.609403 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:27:48.109386238 +0000 UTC m=+147.529453053 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.609747 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/3de2abbd-45d1-489a-8105-448180433f7d-socket-dir\") pod \"csi-hostpathplugin-tbmvb\" (UID: \"3de2abbd-45d1-489a-8105-448180433f7d\") " pod="hostpath-provisioner/csi-hostpathplugin-tbmvb" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.610495 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c0a51b7e-45fa-4c8b-9700-0872a5f49527-config-volume\") pod \"collect-profiles-29415615-7t8s6\" (UID: \"c0a51b7e-45fa-4c8b-9700-0872a5f49527\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-7t8s6" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.611525 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/232f2d6d-4234-4868-9ede-a37034cc5d5b-tmpfs\") pod \"packageserver-d55dfcdfc-wsxgh\" (UID: \"232f2d6d-4234-4868-9ede-a37034cc5d5b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsxgh" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.611840 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/88e9df39-e23d-4d44-9ca0-148deb5d9809-available-featuregates\") pod \"openshift-config-operator-7777fb866f-wfsr7\" (UID: \"88e9df39-e23d-4d44-9ca0-148deb5d9809\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wfsr7" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.613147 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7bec0da8-c8ae-453e-95e1-060b8fb71de1-config-volume\") pod \"dns-default-l8jkp\" (UID: \"7bec0da8-c8ae-453e-95e1-060b8fb71de1\") " pod="openshift-dns/dns-default-l8jkp" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.614024 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/e5603c68-b940-4ede-9433-500080f5eca9-certs\") pod \"machine-config-server-2c9tf\" (UID: \"e5603c68-b940-4ede-9433-500080f5eca9\") " pod="openshift-machine-config-operator/machine-config-server-2c9tf" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.614357 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/99e59e8a-dbaf-4ae4-82d5-505cde15ff2b-signing-key\") pod \"service-ca-9c57cc56f-z2p7n\" (UID: \"99e59e8a-dbaf-4ae4-82d5-505cde15ff2b\") " pod="openshift-service-ca/service-ca-9c57cc56f-z2p7n" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.614970 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/99e59e8a-dbaf-4ae4-82d5-505cde15ff2b-signing-cabundle\") pod \"service-ca-9c57cc56f-z2p7n\" (UID: \"99e59e8a-dbaf-4ae4-82d5-505cde15ff2b\") " 
pod="openshift-service-ca/service-ca-9c57cc56f-z2p7n" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.615920 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/48962005-cd5d-4815-a221-ba37a8037b25-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-2hkzk\" (UID: \"48962005-cd5d-4815-a221-ba37a8037b25\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2hkzk" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.616142 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-fvbbf" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.616218 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/232f2d6d-4234-4868-9ede-a37034cc5d5b-apiservice-cert\") pod \"packageserver-d55dfcdfc-wsxgh\" (UID: \"232f2d6d-4234-4868-9ede-a37034cc5d5b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsxgh" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.618928 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b8c401c4-a06d-45b8-87ef-d76236d85453-srv-cert\") pod \"olm-operator-6b444d44fb-dfh8l\" (UID: \"b8c401c4-a06d-45b8-87ef-d76236d85453\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dfh8l" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.622485 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/88e9df39-e23d-4d44-9ca0-148deb5d9809-serving-cert\") pod \"openshift-config-operator-7777fb866f-wfsr7\" (UID: \"88e9df39-e23d-4d44-9ca0-148deb5d9809\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wfsr7" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.624077 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/4d076d7f-77aa-4e21-9189-80c39bc6147d-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-shp28\" (UID: \"4d076d7f-77aa-4e21-9189-80c39bc6147d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-shp28" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.625730 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/63a61829-2074-4b44-9297-6dcf6236af1a-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-rbpdj\" (UID: \"63a61829-2074-4b44-9297-6dcf6236af1a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rbpdj" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.626821 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/17e06e1c-4128-4e70-b4a7-b83685a7166a-serving-cert\") pod \"service-ca-operator-777779d784-bzfdn\" (UID: \"17e06e1c-4128-4e70-b4a7-b83685a7166a\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-bzfdn" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.626892 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/cf617d33-a828-4556-b92f-90cb28dd8d8c-proxy-tls\") pod \"machine-config-controller-84d6567774-599vr\" 
(UID: \"cf617d33-a828-4556-b92f-90cb28dd8d8c\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-599vr" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.627599 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b8c401c4-a06d-45b8-87ef-d76236d85453-profile-collector-cert\") pod \"olm-operator-6b444d44fb-dfh8l\" (UID: \"b8c401c4-a06d-45b8-87ef-d76236d85453\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dfh8l" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.628830 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/298cdb81-831b-4653-9491-b9215b59b87d-cert\") pod \"ingress-canary-f2nct\" (UID: \"298cdb81-831b-4653-9491-b9215b59b87d\") " pod="openshift-ingress-canary/ingress-canary-f2nct" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.628930 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3a7ecea0-b46b-46dd-b210-56ffe3104e3e-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-hb74q\" (UID: \"3a7ecea0-b46b-46dd-b210-56ffe3104e3e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hb74q" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.630455 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2a6d9550-f59e-4abd-adcd-6514e8c143bc-bound-sa-token\") pod \"ingress-operator-5b745b69d9-c9str\" (UID: \"2a6d9550-f59e-4abd-adcd-6514e8c143bc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-c9str" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.631053 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c0a51b7e-45fa-4c8b-9700-0872a5f49527-secret-volume\") pod \"collect-profiles-29415615-7t8s6\" (UID: \"c0a51b7e-45fa-4c8b-9700-0872a5f49527\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-7t8s6" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.631069 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/c06c3bf6-dd05-4b6a-a64a-5b92a1082c95-proxy-tls\") pod \"machine-config-operator-74547568cd-q7vnr\" (UID: \"c06c3bf6-dd05-4b6a-a64a-5b92a1082c95\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-q7vnr" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.631725 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/232f2d6d-4234-4868-9ede-a37034cc5d5b-webhook-cert\") pod \"packageserver-d55dfcdfc-wsxgh\" (UID: \"232f2d6d-4234-4868-9ede-a37034cc5d5b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsxgh" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.632495 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/e0da22bc-148f-48ea-98b0-0e316a52b1a1-profile-collector-cert\") pod \"catalog-operator-68c6474976-fm4wv\" (UID: \"e0da22bc-148f-48ea-98b0-0e316a52b1a1\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fm4wv" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.634054 4784 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/e0da22bc-148f-48ea-98b0-0e316a52b1a1-srv-cert\") pod \"catalog-operator-68c6474976-fm4wv\" (UID: \"e0da22bc-148f-48ea-98b0-0e316a52b1a1\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fm4wv" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.634422 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/e350bf27-d60f-4f5f-9bc0-460e997fed0c-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-txh4x\" (UID: \"e350bf27-d60f-4f5f-9bc0-460e997fed0c\") " pod="openshift-marketplace/marketplace-operator-79b997595-txh4x" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.634940 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e5f2aa15-edae-4c3f-af17-1fbb714f3f53-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-df92s\" (UID: \"e5f2aa15-edae-4c3f-af17-1fbb714f3f53\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-df92s" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.635006 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/095096dd-0a14-4993-80fb-c332ae212107-stats-auth\") pod \"router-default-5444994796-wpt8s\" (UID: \"095096dd-0a14-4993-80fb-c332ae212107\") " pod="openshift-ingress/router-default-5444994796-wpt8s" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.635242 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/7bec0da8-c8ae-453e-95e1-060b8fb71de1-metrics-tls\") pod \"dns-default-l8jkp\" (UID: \"7bec0da8-c8ae-453e-95e1-060b8fb71de1\") " pod="openshift-dns/dns-default-l8jkp" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.635398 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3a7ecea0-b46b-46dd-b210-56ffe3104e3e-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-hb74q\" (UID: \"3a7ecea0-b46b-46dd-b210-56ffe3104e3e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hb74q" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.635485 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/095096dd-0a14-4993-80fb-c332ae212107-metrics-certs\") pod \"router-default-5444994796-wpt8s\" (UID: \"095096dd-0a14-4993-80fb-c332ae212107\") " pod="openshift-ingress/router-default-5444994796-wpt8s" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.635647 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/e5603c68-b940-4ede-9433-500080f5eca9-node-bootstrap-token\") pod \"machine-config-server-2c9tf\" (UID: \"e5603c68-b940-4ede-9433-500080f5eca9\") " pod="openshift-machine-config-operator/machine-config-server-2c9tf" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.636750 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3edbf65e-d134-4f22-9c92-0f51e9350f05-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-qbc5m\" (UID: \"3edbf65e-d134-4f22-9c92-0f51e9350f05\") " 
pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qbc5m" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.657381 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-c9str" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.658237 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/48962005-cd5d-4815-a221-ba37a8037b25-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-2hkzk\" (UID: \"48962005-cd5d-4815-a221-ba37a8037b25\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2hkzk" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.683765 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fdw24\" (UniqueName: \"kubernetes.io/projected/99e59e8a-dbaf-4ae4-82d5-505cde15ff2b-kube-api-access-fdw24\") pod \"service-ca-9c57cc56f-z2p7n\" (UID: \"99e59e8a-dbaf-4ae4-82d5-505cde15ff2b\") " pod="openshift-service-ca/service-ca-9c57cc56f-z2p7n" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.698622 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:27:47 crc kubenswrapper[4784]: E1205 12:27:47.699259 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:27:48.199242354 +0000 UTC m=+147.619309169 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.699336 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2hkzk" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.703872 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4qnvg\" (UniqueName: \"kubernetes.io/projected/7bec0da8-c8ae-453e-95e1-060b8fb71de1-kube-api-access-4qnvg\") pod \"dns-default-l8jkp\" (UID: \"7bec0da8-c8ae-453e-95e1-060b8fb71de1\") " pod="openshift-dns/dns-default-l8jkp" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.718633 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f7pdl\" (UniqueName: \"kubernetes.io/projected/3edbf65e-d134-4f22-9c92-0f51e9350f05-kube-api-access-f7pdl\") pod \"package-server-manager-789f6589d5-qbc5m\" (UID: \"3edbf65e-d134-4f22-9c92-0f51e9350f05\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qbc5m" Dec 05 12:27:47 crc kubenswrapper[4784]: W1205 12:27:47.740405 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-1944816dd302a98bb9665d848779ec5db94952441353c66fa5d4ee4990e63f74 WatchSource:0}: Error finding container 1944816dd302a98bb9665d848779ec5db94952441353c66fa5d4ee4990e63f74: Status 404 returned error can't find the container with id 1944816dd302a98bb9665d848779ec5db94952441353c66fa5d4ee4990e63f74 Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.744957 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m2c4c" event={"ID":"fe31ef3a-357d-4bbc-89e8-99a1f6ec12f5","Type":"ContainerStarted","Data":"39bb2d3ad72b9efbb20802c7a32bd28754f0667c0e0eb89e4c983b6e11c1e128"} Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.745995 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s9jsc\" (UniqueName: \"kubernetes.io/projected/88e9df39-e23d-4d44-9ca0-148deb5d9809-kube-api-access-s9jsc\") pod \"openshift-config-operator-7777fb866f-wfsr7\" (UID: \"88e9df39-e23d-4d44-9ca0-148deb5d9809\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wfsr7" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.751541 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hb74q" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.754428 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-tcnkb" event={"ID":"260e147b-2517-481c-93f4-3335794f5a1e","Type":"ContainerStarted","Data":"1c17859c9e9a912611397d133d2facca2b07b2549de3460891b840ec37fb175f"} Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.757004 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tpbnk\" (UniqueName: \"kubernetes.io/projected/3de2abbd-45d1-489a-8105-448180433f7d-kube-api-access-tpbnk\") pod \"csi-hostpathplugin-tbmvb\" (UID: \"3de2abbd-45d1-489a-8105-448180433f7d\") " pod="hostpath-provisioner/csi-hostpathplugin-tbmvb" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.766823 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qbc5m" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.772117 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-k7w2s"] Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.776370 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-kjvqd" event={"ID":"1b35d831-6af3-41e4-a111-ebfb9fefb029","Type":"ContainerStarted","Data":"ad6f6259a371324c8a8317d0fded311009b7d62f66e919634743c137898f1b62"} Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.776390 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gngwj\" (UniqueName: \"kubernetes.io/projected/e5f2aa15-edae-4c3f-af17-1fbb714f3f53-kube-api-access-gngwj\") pod \"multus-admission-controller-857f4d67dd-df92s\" (UID: \"e5f2aa15-edae-4c3f-af17-1fbb714f3f53\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-df92s" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.783290 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-df92s" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.796587 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"a3dbc65451bc896f5845ecb46af0d19e746866208fa7bcd8dc8ac9dd908eaad6"} Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.797521 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b577t\" (UniqueName: \"kubernetes.io/projected/232f2d6d-4234-4868-9ede-a37034cc5d5b-kube-api-access-b577t\") pod \"packageserver-d55dfcdfc-wsxgh\" (UID: \"232f2d6d-4234-4868-9ede-a37034cc5d5b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsxgh" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.801201 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:47 crc kubenswrapper[4784]: E1205 12:27:47.801516 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:27:48.301499501 +0000 UTC m=+147.721566336 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.802097 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-z2p7n" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.808524 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b2lpc" event={"ID":"1eb80025-ed6a-4509-99ca-57f7a4c9eefb","Type":"ContainerStarted","Data":"0a401e36ff269ba3931b5640e4c6546ed34f39dd1e5f7ab44ee79f7884ddf59f"} Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.817120 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvm5r\" (UniqueName: \"kubernetes.io/projected/17e06e1c-4128-4e70-b4a7-b83685a7166a-kube-api-access-jvm5r\") pod \"service-ca-operator-777779d784-bzfdn\" (UID: \"17e06e1c-4128-4e70-b4a7-b83685a7166a\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-bzfdn" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.827041 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-89565" event={"ID":"b922af1e-a218-4fc7-8e32-cb3fc03d02e3","Type":"ContainerStarted","Data":"5e4b258ab352a5cd5cb144462900e3c3e1931c51162ace4f4b83d059896afb09"} Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.842885 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2mmsl" event={"ID":"4c735838-6f43-4f60-9adb-4dcabc8f05c2","Type":"ContainerStarted","Data":"71d55f88da2c9595bd8a2327a8eb54b27d737e8eaa0dfab248f156b4fe8e446d"} Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.842942 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2mmsl" event={"ID":"4c735838-6f43-4f60-9adb-4dcabc8f05c2","Type":"ContainerStarted","Data":"ce7c23fe818d7448c4b29455a9655d15a5e56f2f43546193b2437998c0ce49c8"} Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.847037 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fch8c\" (UniqueName: \"kubernetes.io/projected/298cdb81-831b-4653-9491-b9215b59b87d-kube-api-access-fch8c\") pod \"ingress-canary-f2nct\" (UID: \"298cdb81-831b-4653-9491-b9215b59b87d\") " pod="openshift-ingress-canary/ingress-canary-f2nct" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.850969 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-bzfdn" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.858877 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f5pj2" event={"ID":"b192643d-cbd3-4289-8152-7a5e038f1a7e","Type":"ContainerStarted","Data":"87558245a9c38638332b4958b8c929eb1416f031cf2bb858146ab294f7af499a"} Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.859159 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f5pj2" event={"ID":"b192643d-cbd3-4289-8152-7a5e038f1a7e","Type":"ContainerStarted","Data":"260e637181ae9e3f10725ed2d7cb5848ca2f365449979ed7b10e51df5be41727"} Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.866362 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-g67wl" event={"ID":"b63dfa9f-c0b4-41d2-9a30-10388f0ad077","Type":"ContainerStarted","Data":"d1a7928c92198127153b1a9c3eb08d12ab7075296b6c026c424874b32efa207d"} Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.867945 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pdmpf\" (UniqueName: \"kubernetes.io/projected/e0da22bc-148f-48ea-98b0-0e316a52b1a1-kube-api-access-pdmpf\") pod \"catalog-operator-68c6474976-fm4wv\" (UID: \"e0da22bc-148f-48ea-98b0-0e316a52b1a1\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fm4wv" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.873681 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-l8jkp" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.882608 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fqb85\" (UniqueName: \"kubernetes.io/projected/c0a51b7e-45fa-4c8b-9700-0872a5f49527-kube-api-access-fqb85\") pod \"collect-profiles-29415615-7t8s6\" (UID: \"c0a51b7e-45fa-4c8b-9700-0872a5f49527\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-7t8s6" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.888535 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-tbmvb" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.899837 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0d1f50e5-5ec7-4b19-ab3f-119001d4695c-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-ptn7m\" (UID: \"0d1f50e5-5ec7-4b19-ab3f-119001d4695c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ptn7m" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.902549 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:27:47 crc kubenswrapper[4784]: E1205 12:27:47.902988 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-05 12:27:48.402973092 +0000 UTC m=+147.823039907 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.910762 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7pzsb"] Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.927094 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6dqdv\" (UniqueName: \"kubernetes.io/projected/e5603c68-b940-4ede-9433-500080f5eca9-kube-api-access-6dqdv\") pod \"machine-config-server-2c9tf\" (UID: \"e5603c68-b940-4ede-9433-500080f5eca9\") " pod="openshift-machine-config-operator/machine-config-server-2c9tf" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.930091 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-84pmk\" (UniqueName: \"kubernetes.io/projected/b8c401c4-a06d-45b8-87ef-d76236d85453-kube-api-access-84pmk\") pod \"olm-operator-6b444d44fb-dfh8l\" (UID: \"b8c401c4-a06d-45b8-87ef-d76236d85453\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dfh8l" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.943374 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-2drbk" event={"ID":"96b39f3e-b508-4f02-ae7a-d391eeca4988","Type":"ContainerStarted","Data":"bb3daf14eaf2b2910ebd356947fc88784d35a61d904d5a33d02b0525ba264e13"} Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.943891 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-2drbk" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.945406 4784 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-2drbk container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.11:8443/healthz\": dial tcp 10.217.0.11:8443: connect: connection refused" start-of-body= Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.945448 4784 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-2drbk" podUID="96b39f3e-b508-4f02-ae7a-d391eeca4988" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.11:8443/healthz\": dial tcp 10.217.0.11:8443: connect: connection refused" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.968966 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8nz64\" (UniqueName: \"kubernetes.io/projected/a23cdf56-ddf2-4914-93d8-18b0d5cdd52f-kube-api-access-8nz64\") pod \"migrator-59844c95c7-wrh5b\" (UID: \"a23cdf56-ddf2-4914-93d8-18b0d5cdd52f\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-wrh5b" Dec 05 12:27:47 crc kubenswrapper[4784]: I1205 12:27:47.982914 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wfsr7" Dec 05 12:27:48 crc kubenswrapper[4784]: I1205 12:27:48.004004 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:48 crc kubenswrapper[4784]: E1205 12:27:48.005170 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:27:48.505156118 +0000 UTC m=+147.925222933 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:48 crc kubenswrapper[4784]: I1205 12:27:48.015787 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-wrh5b" Dec 05 12:27:48 crc kubenswrapper[4784]: I1205 12:27:48.017395 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ptn7m" Dec 05 12:27:48 crc kubenswrapper[4784]: I1205 12:27:48.032506 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v8bzh\" (UniqueName: \"kubernetes.io/projected/e350bf27-d60f-4f5f-9bc0-460e997fed0c-kube-api-access-v8bzh\") pod \"marketplace-operator-79b997595-txh4x\" (UID: \"e350bf27-d60f-4f5f-9bc0-460e997fed0c\") " pod="openshift-marketplace/marketplace-operator-79b997595-txh4x" Dec 05 12:27:48 crc kubenswrapper[4784]: I1205 12:27:48.033255 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5vp94\" (UniqueName: \"kubernetes.io/projected/cf617d33-a828-4556-b92f-90cb28dd8d8c-kube-api-access-5vp94\") pod \"machine-config-controller-84d6567774-599vr\" (UID: \"cf617d33-a828-4556-b92f-90cb28dd8d8c\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-599vr" Dec 05 12:27:48 crc kubenswrapper[4784]: I1205 12:27:48.036337 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-kld95"] Dec 05 12:27:48 crc kubenswrapper[4784]: I1205 12:27:48.042504 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-fvbbf"] Dec 05 12:27:48 crc kubenswrapper[4784]: I1205 12:27:48.046460 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-txh4x" Dec 05 12:27:48 crc kubenswrapper[4784]: I1205 12:27:48.046789 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b5khb\" (UniqueName: \"kubernetes.io/projected/c06c3bf6-dd05-4b6a-a64a-5b92a1082c95-kube-api-access-b5khb\") pod \"machine-config-operator-74547568cd-q7vnr\" (UID: \"c06c3bf6-dd05-4b6a-a64a-5b92a1082c95\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-q7vnr" Dec 05 12:27:48 crc kubenswrapper[4784]: I1205 12:27:48.053269 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m9hds\" (UniqueName: \"kubernetes.io/projected/4d076d7f-77aa-4e21-9189-80c39bc6147d-kube-api-access-m9hds\") pod \"control-plane-machine-set-operator-78cbb6b69f-shp28\" (UID: \"4d076d7f-77aa-4e21-9189-80c39bc6147d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-shp28" Dec 05 12:27:48 crc kubenswrapper[4784]: I1205 12:27:48.062316 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dfh8l" Dec 05 12:27:48 crc kubenswrapper[4784]: I1205 12:27:48.075999 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fm4wv" Dec 05 12:27:48 crc kubenswrapper[4784]: I1205 12:27:48.076056 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8x66t\" (UniqueName: \"kubernetes.io/projected/095096dd-0a14-4993-80fb-c332ae212107-kube-api-access-8x66t\") pod \"router-default-5444994796-wpt8s\" (UID: \"095096dd-0a14-4993-80fb-c332ae212107\") " pod="openshift-ingress/router-default-5444994796-wpt8s" Dec 05 12:27:48 crc kubenswrapper[4784]: I1205 12:27:48.083264 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9h9p6\" (UniqueName: \"kubernetes.io/projected/63a61829-2074-4b44-9297-6dcf6236af1a-kube-api-access-9h9p6\") pod \"kube-storage-version-migrator-operator-b67b599dd-rbpdj\" (UID: \"63a61829-2074-4b44-9297-6dcf6236af1a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rbpdj" Dec 05 12:27:48 crc kubenswrapper[4784]: I1205 12:27:48.091581 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsxgh" Dec 05 12:27:48 crc kubenswrapper[4784]: I1205 12:27:48.122131 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:27:48 crc kubenswrapper[4784]: I1205 12:27:48.124379 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2hkzk"] Dec 05 12:27:48 crc kubenswrapper[4784]: E1205 12:27:48.124771 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:27:48.624749149 +0000 UTC m=+148.044815964 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:48 crc kubenswrapper[4784]: I1205 12:27:48.126893 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-7t8s6" Dec 05 12:27:48 crc kubenswrapper[4784]: I1205 12:27:48.127644 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-f2nct" Dec 05 12:27:48 crc kubenswrapper[4784]: I1205 12:27:48.128136 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-wpt8s" Dec 05 12:27:48 crc kubenswrapper[4784]: I1205 12:27:48.128845 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-m7m44"] Dec 05 12:27:48 crc kubenswrapper[4784]: I1205 12:27:48.151310 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-n8hdp"] Dec 05 12:27:48 crc kubenswrapper[4784]: I1205 12:27:48.157691 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-rmjws"] Dec 05 12:27:48 crc kubenswrapper[4784]: I1205 12:27:48.162414 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-2c9tf" Dec 05 12:27:48 crc kubenswrapper[4784]: I1205 12:27:48.230323 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:48 crc kubenswrapper[4784]: E1205 12:27:48.230796 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:27:48.730780298 +0000 UTC m=+148.150847113 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:48 crc kubenswrapper[4784]: I1205 12:27:48.274100 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-q7vnr" Dec 05 12:27:48 crc kubenswrapper[4784]: I1205 12:27:48.323402 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rbpdj" Dec 05 12:27:48 crc kubenswrapper[4784]: I1205 12:27:48.330849 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-599vr" Dec 05 12:27:48 crc kubenswrapper[4784]: I1205 12:27:48.331825 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:27:48 crc kubenswrapper[4784]: E1205 12:27:48.336711 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:27:48.836685663 +0000 UTC m=+148.256752478 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:48 crc kubenswrapper[4784]: I1205 12:27:48.338490 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-shp28" Dec 05 12:27:48 crc kubenswrapper[4784]: I1205 12:27:48.433173 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:48 crc kubenswrapper[4784]: E1205 12:27:48.433730 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:27:48.933718019 +0000 UTC m=+148.353784834 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:48 crc kubenswrapper[4784]: I1205 12:27:48.534444 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:27:48 crc kubenswrapper[4784]: E1205 12:27:48.534769 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:27:49.034736274 +0000 UTC m=+148.454803089 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:48 crc kubenswrapper[4784]: I1205 12:27:48.534893 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:48 crc kubenswrapper[4784]: E1205 12:27:48.535271 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:27:49.035258802 +0000 UTC m=+148.455325617 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:48 crc kubenswrapper[4784]: I1205 12:27:48.647732 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:27:48 crc kubenswrapper[4784]: E1205 12:27:48.647902 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:27:49.14788028 +0000 UTC m=+148.567947095 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:48 crc kubenswrapper[4784]: I1205 12:27:48.648057 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:48 crc kubenswrapper[4784]: E1205 12:27:48.648593 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:27:49.148557914 +0000 UTC m=+148.568624729 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:48 crc kubenswrapper[4784]: I1205 12:27:48.749846 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:27:48 crc kubenswrapper[4784]: E1205 12:27:48.750847 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:27:49.250829742 +0000 UTC m=+148.670896557 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:48 crc kubenswrapper[4784]: I1205 12:27:48.852262 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:48 crc kubenswrapper[4784]: E1205 12:27:48.852651 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:27:49.352635645 +0000 UTC m=+148.772702470 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:48 crc kubenswrapper[4784]: I1205 12:27:48.973436 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:27:48 crc kubenswrapper[4784]: E1205 12:27:48.973733 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:27:49.473701487 +0000 UTC m=+148.893768302 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:48 crc kubenswrapper[4784]: I1205 12:27:48.974378 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:48 crc kubenswrapper[4784]: E1205 12:27:48.974876 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:27:49.474848297 +0000 UTC m=+148.894915112 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:27:48 crc kubenswrapper[4784]: I1205 12:27:48.977946 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hb74q"]
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.092407 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 12:27:49 crc kubenswrapper[4784]: E1205 12:27:49.092826 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:27:49.592806701 +0000 UTC m=+149.012873516 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.108454 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"01bb41e5dee6ab677a7334ad290d19f655f40e14239172c5a61d0986ba096b89"}
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.108820 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qbc5m"]
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.113203 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m2c4c" event={"ID":"fe31ef3a-357d-4bbc-89e8-99a1f6ec12f5","Type":"ContainerStarted","Data":"09cf598908a890c12a1c4b6aee62678cc0b64d4b05de6c238f17146703a000f3"}
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.134032 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-2drbk" podStartSLOduration=126.134015926 podStartE2EDuration="2m6.134015926s" podCreationTimestamp="2025-12-05 12:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:49.131613971 +0000 UTC m=+148.551680786" watchObservedRunningTime="2025-12-05 12:27:49.134015926 +0000 UTC m=+148.554082741"
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.153229 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"2bc3b3199497bea13a46796e6a7e6298780fc82ee2e39538e2a00b45ba317fef"}
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.153322 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.184145 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b2lpc" event={"ID":"1eb80025-ed6a-4509-99ca-57f7a4c9eefb","Type":"ContainerStarted","Data":"1d7c0360a54b382bf2157239816176812981170752723c962d30d8d657d982ef"}
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.185017 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b2lpc"
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.194031 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs"
Dec 05 12:27:49 crc kubenswrapper[4784]: E1205 12:27:49.194678 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:27:49.694659165 +0000 UTC m=+149.114726040 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.208884 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7pzsb" event={"ID":"66b95a4b-b2d2-4823-b0c4-cbc8ae47213d","Type":"ContainerStarted","Data":"86b209040df41aca68fc7e8a509a7bc64954d7618b77c3e51941b693d8beabdd"}
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.240288 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-tcnkb" event={"ID":"260e147b-2517-481c-93f4-3335794f5a1e","Type":"ContainerStarted","Data":"0bc1e07d7cab18aa831e8db1dbbb48d6fb446634dc8463bdd575c6ec80caf8a8"}
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.281465 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-n8hdp" event={"ID":"798fd230-84b4-4a83-98db-2e6fd780ca50","Type":"ContainerStarted","Data":"0b77531355777498c43008c7b85d24a6169c42c262bd3347c1c9eaabbdaa43c4"}
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.297947 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 12:27:49 crc kubenswrapper[4784]: E1205 12:27:49.298094 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:27:49.798079723 +0000 UTC m=+149.218146539 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.298650 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs"
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.301096 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-wpt8s" event={"ID":"095096dd-0a14-4993-80fb-c332ae212107","Type":"ContainerStarted","Data":"a96e426acc1130108252a3672bb1be6dde17a9220446c9d530fa58465e6843b8"}
Dec 05 12:27:49 crc kubenswrapper[4784]: E1205 12:27:49.301479 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:27:49.801465411 +0000 UTC m=+149.221532226 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.359957 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-k7w2s" event={"ID":"30b70430-7471-4b38-a1a2-22d557f5e1ca","Type":"ContainerStarted","Data":"a00345a4827aae2f42f23e5099d7073541a61e42aeb0d0b1ed705910ba5c1a65"}
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.360021 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-k7w2s" event={"ID":"30b70430-7471-4b38-a1a2-22d557f5e1ca","Type":"ContainerStarted","Data":"ec177c8482714b8eaa875864532e36b20153388818be2571c08d0942f5b10c76"}
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.360370 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-k7w2s"
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.376120 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f5pj2" podStartSLOduration=126.376104338 podStartE2EDuration="2m6.376104338s" podCreationTimestamp="2025-12-05 12:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:49.341571436 +0000 UTC m=+148.761638251" watchObservedRunningTime="2025-12-05 12:27:49.376104338 +0000 UTC m=+148.796171153"
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.399313 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 12:27:49 crc kubenswrapper[4784]: E1205 12:27:49.399667 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:27:49.899606726 +0000 UTC m=+149.319673541 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.399780 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs"
Dec 05 12:27:49 crc kubenswrapper[4784]: E1205 12:27:49.401079 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:27:49.901062466 +0000 UTC m=+149.321129281 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.411523 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-2drbk" event={"ID":"96b39f3e-b508-4f02-ae7a-d391eeca4988","Type":"ContainerStarted","Data":"1ca2240bab3459a80b01224ca88bc88e6f780a9712287579feb51f56301de141"}
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.429104 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-m7m44" event={"ID":"663c4c9a-9738-4c49-9199-d2a18cd6d4be","Type":"ContainerStarted","Data":"fee62a5a67b8b559bfaae53985558252d9a01b8b38558502b7e0d5c26b6e5557"}
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.433389 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-rmjws" event={"ID":"0fb47de9-a3dd-4b61-a8d7-a6d7150f601b","Type":"ContainerStarted","Data":"d7348182415ddc43b4feade992e33440519ece96fba673c6e92db6faae5f80e3"}
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.436255 4784 generic.go:334] "Generic (PLEG): container finished" podID="b63dfa9f-c0b4-41d2-9a30-10388f0ad077" containerID="47d434bb0455dbb9e0c510f8d71dff17db75f70d1e7d5fc0328ed4a8864c17e3" exitCode=0
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.436526 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-g67wl" event={"ID":"b63dfa9f-c0b4-41d2-9a30-10388f0ad077","Type":"ContainerDied","Data":"47d434bb0455dbb9e0c510f8d71dff17db75f70d1e7d5fc0328ed4a8864c17e3"}
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.437583 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2hkzk" event={"ID":"48962005-cd5d-4815-a221-ba37a8037b25","Type":"ContainerStarted","Data":"6f5ebb44bc645171194cb2647b31798e6a0ead3218b9bc5fb809dcb99a2127c4"}
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.438663 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rmq4p" event={"ID":"e61ab90d-5502-48b1-9d9b-bb257fd3ac74","Type":"ContainerStarted","Data":"a05b1ab9bdae373b10c0da08a23277e5af0bcdd355f97b05a1ead04e67505875"}
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.438696 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rmq4p" event={"ID":"e61ab90d-5502-48b1-9d9b-bb257fd3ac74","Type":"ContainerStarted","Data":"fbade1504bfa1ac53da53d93674e7a9e08d3103a4367327ef2b961063cc26f96"}
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.440054 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-fvbbf" event={"ID":"f61c795f-92e7-4eb0-8cf0-dcd934749576","Type":"ContainerStarted","Data":"3f3ab1c6a373062e3ce416694a399e5874792e0247d4931dacff36367c17b8e6"}
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.442254 4784 generic.go:334] "Generic (PLEG): container finished" podID="b922af1e-a218-4fc7-8e32-cb3fc03d02e3" containerID="3086d0b8573bc4080bdf2c4277242db88bbf8149faed96312fac1cd5099af98b" exitCode=0
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.442304 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-89565" event={"ID":"b922af1e-a218-4fc7-8e32-cb3fc03d02e3","Type":"ContainerDied","Data":"3086d0b8573bc4080bdf2c4277242db88bbf8149faed96312fac1cd5099af98b"}
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.455989 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2mmsl" event={"ID":"4c735838-6f43-4f60-9adb-4dcabc8f05c2","Type":"ContainerStarted","Data":"3d8f1ef2208eb3dce49b6a311ccc5a0d4ddd89f8ada9052a832655d8dec74e53"}
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.457748 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-2c9tf" event={"ID":"e5603c68-b940-4ede-9433-500080f5eca9","Type":"ContainerStarted","Data":"7cb6a2edf416775b7073f58fff1787a4c3b972c2f2d9de72f9d98cb2f5b4d933"}
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.458950 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-kld95" event={"ID":"da405e0d-550f-42a2-8b4e-a387eabb8e0a","Type":"ContainerStarted","Data":"ba2b8cb3abcff27c986b848d1c561f362fac0c53b509b057c437687c31c0f353"}
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.462515 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-kjvqd" event={"ID":"1b35d831-6af3-41e4-a111-ebfb9fefb029","Type":"ContainerStarted","Data":"1292d9e869e297221322dd3c6b78482c20c3d08f6e87753c90ebff7301373134"}
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.462554 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-kjvqd" event={"ID":"1b35d831-6af3-41e4-a111-ebfb9fefb029","Type":"ContainerStarted","Data":"4492e8d4e22c24ded39ed203113c63d4ff368c6be86ff32cc0d48cb634978c81"}
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.478539 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"c45bba2f77df4829386c4ecf6695d12329346d8bf5f356e4b519ec101be335c4"}
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.478576 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"1944816dd302a98bb9665d848779ec5db94952441353c66fa5d4ee4990e63f74"}
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.504643 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 12:27:49 crc kubenswrapper[4784]: E1205 12:27:49.505762 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:27:50.005746389 +0000 UTC m=+149.425813204 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.549320 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b2lpc" podStartSLOduration=125.549304884 podStartE2EDuration="2m5.549304884s" podCreationTimestamp="2025-12-05 12:25:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:49.548847728 +0000 UTC m=+148.968914543" watchObservedRunningTime="2025-12-05 12:27:49.549304884 +0000 UTC m=+148.969371699"
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.590137 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-k7w2s" podStartSLOduration=126.590117435 podStartE2EDuration="2m6.590117435s" podCreationTimestamp="2025-12-05 12:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:49.585267145 +0000 UTC m=+149.005333960" watchObservedRunningTime="2025-12-05 12:27:49.590117435 +0000 UTC m=+149.010184250"
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.606789 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs"
Dec 05 12:27:49 crc kubenswrapper[4784]: E1205 12:27:49.612218 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:27:50.112182822 +0000 UTC m=+149.532249697 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.708274 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 12:27:49 crc kubenswrapper[4784]: E1205 12:27:49.708445 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:27:50.20842368 +0000 UTC m=+149.628490495 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.708507 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs"
Dec 05 12:27:49 crc kubenswrapper[4784]: E1205 12:27:49.709082 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:27:50.209070223 +0000 UTC m=+149.629137038 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.723647 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-tcnkb" podStartSLOduration=126.72362573 podStartE2EDuration="2m6.72362573s" podCreationTimestamp="2025-12-05 12:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:49.689563054 +0000 UTC m=+149.109629869" watchObservedRunningTime="2025-12-05 12:27:49.72362573 +0000 UTC m=+149.143692545"
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.758177 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2mmsl" podStartSLOduration=126.758161842 podStartE2EDuration="2m6.758161842s" podCreationTimestamp="2025-12-05 12:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:49.724405547 +0000 UTC m=+149.144472362" watchObservedRunningTime="2025-12-05 12:27:49.758161842 +0000 UTC m=+149.178228647"
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.805976 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m2c4c" podStartSLOduration=126.805953684 podStartE2EDuration="2m6.805953684s" podCreationTimestamp="2025-12-05 12:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:49.792464374 +0000 UTC m=+149.212531199" watchObservedRunningTime="2025-12-05 12:27:49.805953684 +0000 UTC m=+149.226020499"
Dec 05 12:27:49 crc kubenswrapper[4784]: W1205 12:27:49.808349 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3edbf65e_d134_4f22_9c92_0f51e9350f05.slice/crio-db7d7dbece056a8d2cf8c4c6a25a692174e09dc2bf56be7a85a67d3babcee95a WatchSource:0}: Error finding container db7d7dbece056a8d2cf8c4c6a25a692174e09dc2bf56be7a85a67d3babcee95a: Status 404 returned error can't find the container with id db7d7dbece056a8d2cf8c4c6a25a692174e09dc2bf56be7a85a67d3babcee95a
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.811342 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 12:27:49 crc kubenswrapper[4784]: E1205 12:27:49.811616 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:27:50.31160066 +0000 UTC m=+149.731667475 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.823759 4784 patch_prober.go:28] interesting pod/downloads-7954f5f757-k7w2s container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body=
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.823814 4784 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-k7w2s" podUID="30b70430-7471-4b38-a1a2-22d557f5e1ca" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused"
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.827578 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b2lpc"
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.829922 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-2drbk"
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.929526 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs"
Dec 05 12:27:49 crc kubenswrapper[4784]: E1205 12:27:49.931978 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:27:50.431956887 +0000 UTC m=+149.852023692 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:27:49 crc kubenswrapper[4784]: I1205 12:27:49.961873 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-kjvqd" podStartSLOduration=125.961841618 podStartE2EDuration="2m5.961841618s" podCreationTimestamp="2025-12-05 12:25:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:49.957717924 +0000 UTC m=+149.377784739" watchObservedRunningTime="2025-12-05 12:27:49.961841618 +0000 UTC m=+149.381908433"
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.063867 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 12:27:50 crc kubenswrapper[4784]: E1205 12:27:50.064950 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:27:50.564930855 +0000 UTC m=+149.984997670 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.137680 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-c9str"]
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.149648 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-tbmvb"]
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.167181 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs"
Dec 05 12:27:50 crc kubenswrapper[4784]: E1205 12:27:50.168125 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:27:50.668107004 +0000 UTC m=+150.088173809 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.175787 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-z2p7n"]
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.268571 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 12:27:50 crc kubenswrapper[4784]: E1205 12:27:50.269297 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:27:50.769275275 +0000 UTC m=+150.189342090 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.296382 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-df92s"]
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.375848 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs"
Dec 05 12:27:50 crc kubenswrapper[4784]: E1205 12:27:50.376267 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:27:50.876250867 +0000 UTC m=+150.296317692 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:27:50 crc kubenswrapper[4784]: W1205 12:27:50.418054 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode5f2aa15_edae_4c3f_af17_1fbb714f3f53.slice/crio-34020b65e29c6137e12af45bf3bfd65d36dcf17296784aa2d8eee076d7c83662 WatchSource:0}: Error finding container 34020b65e29c6137e12af45bf3bfd65d36dcf17296784aa2d8eee076d7c83662: Status 404 returned error can't find the container with id 34020b65e29c6137e12af45bf3bfd65d36dcf17296784aa2d8eee076d7c83662
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.477178 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 12:27:50 crc kubenswrapper[4784]: E1205 12:27:50.477319 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:27:50.977300423 +0000 UTC m=+150.397367238 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.477787 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs"
Dec 05 12:27:50 crc kubenswrapper[4784]: E1205 12:27:50.483592 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:27:50.98357271 +0000 UTC m=+150.403639525 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.515901 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-bzfdn"]
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.532817 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-2c9tf" event={"ID":"e5603c68-b940-4ede-9433-500080f5eca9","Type":"ContainerStarted","Data":"78e4a0dba2b9e70041f3159f5a9a2a532cb9aefd9e99ad34102b3eb161871515"}
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.545144 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-l8jkp"]
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.550259 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-z2p7n" event={"ID":"99e59e8a-dbaf-4ae4-82d5-505cde15ff2b","Type":"ContainerStarted","Data":"7401d3a416c3eb81f1135dc8012f2af2f211d1057a12a70c7f987ad833b6e824"}
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.553427 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7pzsb" event={"ID":"66b95a4b-b2d2-4823-b0c4-cbc8ae47213d","Type":"ContainerStarted","Data":"4366e068137df64909647fd56c3c2fd629ff76468a4ba01093ad0c90b6f19d35"}
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.577487 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qbc5m" event={"ID":"3edbf65e-d134-4f22-9c92-0f51e9350f05","Type":"ContainerStarted","Data":"c545dc0594641b31159484dbaebe97dfaac5f80540c0b6a95f9eb9ecd04d7972"}
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.577549 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qbc5m" event={"ID":"3edbf65e-d134-4f22-9c92-0f51e9350f05","Type":"ContainerStarted","Data":"db7d7dbece056a8d2cf8c4c6a25a692174e09dc2bf56be7a85a67d3babcee95a"}
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.579796 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.581727 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-2c9tf" podStartSLOduration=6.581716075 podStartE2EDuration="6.581716075s" podCreationTimestamp="2025-12-05 12:27:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:50.557624967 +0000 UTC m=+149.977691782" watchObservedRunningTime="2025-12-05 12:27:50.581716075 +0000 UTC m=+150.001782890"
Dec 05 12:27:50 crc kubenswrapper[4784]: E1205 12:27:50.582743 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:27:51.08271976 +0000 UTC m=+150.502786575 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.584923 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dfh8l"]
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.595798 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7pzsb" podStartSLOduration=127.595780325 podStartE2EDuration="2m7.595780325s" podCreationTimestamp="2025-12-05 12:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:50.593084351 +0000 UTC m=+150.013151176" watchObservedRunningTime="2025-12-05 12:27:50.595780325 +0000 UTC m=+150.015847240"
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.596927 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"2e730fac3899ad4487584ebd141bb9be4619231f58e0d3330426fb84bf2c6140"}
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.666642 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-n8hdp" event={"ID":"798fd230-84b4-4a83-98db-2e6fd780ca50","Type":"ContainerStarted","Data":"ead071024453550cbb0a7d47b7caefabd424e81426edad3acf472a7302f844d9"}
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.694839 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-wpt8s" event={"ID":"095096dd-0a14-4993-80fb-c332ae212107","Type":"ContainerStarted","Data":"36826d482b545e1639cd1355a720748d85af17cf1ce6eb1792c79e4231008cf8"}
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.696029 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs"
Dec 05 12:27:50 crc kubenswrapper[4784]: E1205 12:27:50.696376 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:27:51.196362994 +0000 UTC m=+150.616429809 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.703940 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-df92s" event={"ID":"e5f2aa15-edae-4c3f-af17-1fbb714f3f53","Type":"ContainerStarted","Data":"34020b65e29c6137e12af45bf3bfd65d36dcf17296784aa2d8eee076d7c83662"}
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.715113 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415615-7t8s6"]
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.720112 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-g67wl" event={"ID":"b63dfa9f-c0b4-41d2-9a30-10388f0ad077","Type":"ContainerStarted","Data":"62f54c0d427d2e295ee208df35e02912b56af20212ab64bc8b880145fd5c5a69"}
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.734231 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-m7m44" event={"ID":"663c4c9a-9738-4c49-9199-d2a18cd6d4be","Type":"ContainerStarted","Data":"9bf026b5dd8a32187f00e0a1e059a59224ba691628d7bb95f8278cb3a9b06b0b"}
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.741242 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-wrh5b"]
Dec 05 12:27:50 crc kubenswrapper[4784]: W1205 12:27:50.741592 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod17e06e1c_4128_4e70_b4a7_b83685a7166a.slice/crio-0c57fb95dcc10ee7480d2d90b5471c4af895ec24bf4423ec9a74b391f3834526 WatchSource:0}: Error finding container 0c57fb95dcc10ee7480d2d90b5471c4af895ec24bf4423ec9a74b391f3834526: Status 404 returned error can't find the container with id 0c57fb95dcc10ee7480d2d90b5471c4af895ec24bf4423ec9a74b391f3834526
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.752509 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-rmjws" event={"ID":"0fb47de9-a3dd-4b61-a8d7-a6d7150f601b","Type":"ContainerStarted","Data":"d194ef600b9b096b717873b197b7085504592b5c0b67b96a12106b4d5731600b"}
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.754167 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-c9str" event={"ID":"2a6d9550-f59e-4abd-adcd-6514e8c143bc","Type":"ContainerStarted","Data":"c16893269b347e185a44a0049dfcccc05544fe1a04ec76def883a6c2d3a91dfc"}
Dec 05 12:27:50 crc kubenswrapper[4784]: W1205 12:27:50.771327 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc0a51b7e_45fa_4c8b_9700_0872a5f49527.slice/crio-f9e6172c41b34b6fcf10d814576261c172095c1f8e24d9b1a4a574e40c7027be WatchSource:0}: Error finding container f9e6172c41b34b6fcf10d814576261c172095c1f8e24d9b1a4a574e40c7027be: Status 404 returned error can't find the container with id f9e6172c41b34b6fcf10d814576261c172095c1f8e24d9b1a4a574e40c7027be
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.773049 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hb74q" event={"ID":"3a7ecea0-b46b-46dd-b210-56ffe3104e3e","Type":"ContainerStarted","Data":"b15e8579b08e0b3cd35601246f6c7f721bd73935e6a4909dd1161ac5dc30809a"}
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.773088 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hb74q" event={"ID":"3a7ecea0-b46b-46dd-b210-56ffe3104e3e","Type":"ContainerStarted","Data":"d36c17086fe8e787ca7a07ed65661885c5d8fc8359d85c11f1bd8a356b6e2e57"}
Dec 05 12:27:50 crc kubenswrapper[4784]: W1205 12:27:50.776751 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda23cdf56_ddf2_4914_93d8_18b0d5cdd52f.slice/crio-61232a71ddcdedee946fb2ae584f4704d6d756a151e1e7e34f169a3c1a3af52b WatchSource:0}: Error finding container 61232a71ddcdedee946fb2ae584f4704d6d756a151e1e7e34f169a3c1a3af52b: Status 404 returned error can't find the container with id 61232a71ddcdedee946fb2ae584f4704d6d756a151e1e7e34f169a3c1a3af52b
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.784025 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-wpt8s" podStartSLOduration=126.784001774 podStartE2EDuration="2m6.784001774s" podCreationTimestamp="2025-12-05 12:25:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:50.771739078 +0000 UTC m=+150.191805893" watchObservedRunningTime="2025-12-05 12:27:50.784001774 +0000 UTC m=+150.204068589"
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.820070 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 12:27:50 crc kubenswrapper[4784]: E1205 12:27:50.821044 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:27:51.321018022 +0000 UTC m=+150.741084837 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.822717 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-fvbbf" event={"ID":"f61c795f-92e7-4eb0-8cf0-dcd934749576","Type":"ContainerStarted","Data":"12f41171588027c66d8ab6af9b8d055087f1b366e890369e9ad579c771f73be1"}
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.822749 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-fvbbf"
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.835478 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-kld95" event={"ID":"da405e0d-550f-42a2-8b4e-a387eabb8e0a","Type":"ContainerStarted","Data":"b9970169ea79c86d085d07d939b108567ec2b85e42b6c08645c31f4c1fbfa6c9"}
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.836320 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-kld95"
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.837511 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-txh4x"]
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.855354 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2hkzk" event={"ID":"48962005-cd5d-4815-a221-ba37a8037b25","Type":"ContainerStarted","Data":"e36ad6e568ed03abfb21153520e5389b6133efa1d5e0b03022c8fed2223c053b"}
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.917741 4784 patch_prober.go:28] interesting pod/console-operator-58897d9998-fvbbf container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.26:8443/readyz\": dial tcp 10.217.0.26:8443: connect: connection refused" start-of-body=
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.917785 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-m7m44" podStartSLOduration=127.917761858 podStartE2EDuration="2m7.917761858s" podCreationTimestamp="2025-12-05 12:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:50.858957802 +0000 UTC m=+150.279024627" watchObservedRunningTime="2025-12-05 12:27:50.917761858 +0000 UTC m=+150.337828673"
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.919880 4784 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-kld95 container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.10:6443/healthz\": dial tcp 10.217.0.10:6443: connect: connection refused" start-of-body=
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.919998 4784 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-kld95" podUID="da405e0d-550f-42a2-8b4e-a387eabb8e0a" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.10:6443/healthz\": dial tcp 10.217.0.10:6443: connect: connection refused"
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.917829 4784 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-fvbbf" podUID="f61c795f-92e7-4eb0-8cf0-dcd934749576" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.26:8443/readyz\": dial tcp 10.217.0.26:8443: connect: connection refused"
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.920522 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-shp28"]
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.922595 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs"
Dec 05 12:27:50 crc kubenswrapper[4784]: E1205 12:27:50.925829 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:27:51.425810307 +0000 UTC m=+150.845877122 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.927770 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-f2nct"]
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.932334 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rbpdj"]
Dec 05 12:27:50 crc kubenswrapper[4784]: I1205 12:27:50.946746 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rmq4p" event={"ID":"e61ab90d-5502-48b1-9d9b-bb257fd3ac74","Type":"ContainerStarted","Data":"7eeb750ec1b7f0eedf3aa65de54d8d12477581d4551113269381b44e46b5d232"}
Dec 05 12:27:51 crc kubenswrapper[4784]: I1205 12:27:51.012273 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-rmjws" podStartSLOduration=128.012244596 podStartE2EDuration="2m8.012244596s" podCreationTimestamp="2025-12-05 12:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:50.994010011 +0000 UTC m=+150.414076826" watchObservedRunningTime="2025-12-05 12:27:51.012244596 +0000 UTC m=+150.432311411"
Dec 05 12:27:51 crc kubenswrapper[4784]: I1205 12:27:51.029623 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 12:27:51 crc kubenswrapper[4784]: E1205 12:27:51.032801 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:27:51.530242011 +0000 UTC m=+150.950308826 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:27:51 crc kubenswrapper[4784]: I1205 12:27:51.048442 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-tbmvb" event={"ID":"3de2abbd-45d1-489a-8105-448180433f7d","Type":"ContainerStarted","Data":"410d12f1c19c2d3ce54664db9e6a76949869abdb60a2f9867f4e05540afe75bc"}
Dec 05 12:27:51 crc kubenswrapper[4784]: I1205 12:27:51.048481 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-wfsr7"]
Dec 05 12:27:51 crc kubenswrapper[4784]: I1205 12:27:51.048499 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsxgh"]
Dec 05 12:27:51 crc kubenswrapper[4784]: I1205 12:27:51.048508 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-599vr"]
Dec 05 12:27:51 crc kubenswrapper[4784]: I1205 12:27:51.048517 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ptn7m"]
Dec 05 12:27:51 crc kubenswrapper[4784]: I1205 12:27:51.093356 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-89565" event={"ID":"b922af1e-a218-4fc7-8e32-cb3fc03d02e3","Type":"ContainerStarted","Data":"696b0eebaf913068313e68a76695326ea30e152bb79230f296ed59682abf8163"}
Dec 05 12:27:51 crc kubenswrapper[4784]: I1205 12:27:51.101329 4784 patch_prober.go:28] interesting pod/downloads-7954f5f757-k7w2s container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body=
Dec 05 12:27:51 crc kubenswrapper[4784]: I1205 12:27:51.101398 4784 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-k7w2s" podUID="30b70430-7471-4b38-a1a2-22d557f5e1ca" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused"
Dec 05 12:27:51 crc kubenswrapper[4784]: I1205 12:27:51.102809 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rmq4p" podStartSLOduration=128.102792226 podStartE2EDuration="2m8.102792226s" podCreationTimestamp="2025-12-05 12:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:51.065484188 +0000 UTC m=+150.485551003" watchObservedRunningTime="2025-12-05 12:27:51.102792226 +0000 UTC m=+150.522859041"
Dec 05 12:27:51 crc kubenswrapper[4784]: I1205 12:27:51.103140 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fm4wv"]
Dec 05 12:27:51 crc kubenswrapper[4784]: I1205 12:27:51.115384 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-kld95" podStartSLOduration=128.115362733 podStartE2EDuration="2m8.115362733s" podCreationTimestamp="2025-12-05 12:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:51.114018466 +0000 UTC m=+150.534085301" watchObservedRunningTime="2025-12-05 12:27:51.115362733 +0000 UTC m=+150.535429548"
Dec 05 12:27:51 crc kubenswrapper[4784]: I1205 12:27:51.134613 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs"
Dec 05 12:27:51 crc kubenswrapper[4784]: I1205 12:27:51.137643 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-wpt8s"
Dec 05 12:27:51 crc kubenswrapper[4784]: E1205 12:27:51.137908 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:27:51.637891487 +0000 UTC m=+151.057958362 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 12:27:51 crc kubenswrapper[4784]: I1205 12:27:51.146125 4784 patch_prober.go:28] interesting pod/router-default-5444994796-wpt8s container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 05 12:27:51 crc kubenswrapper[4784]: [-]has-synced failed: reason withheld
Dec 05 12:27:51 crc kubenswrapper[4784]: [+]process-running ok
Dec 05 12:27:51 crc kubenswrapper[4784]: healthz check failed
Dec 05 12:27:51 crc kubenswrapper[4784]: I1205 12:27:51.150083 4784 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wpt8s" podUID="095096dd-0a14-4993-80fb-c332ae212107" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 05 12:27:51 crc kubenswrapper[4784]: I1205 12:27:51.166603 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-fvbbf" podStartSLOduration=128.166583875 podStartE2EDuration="2m8.166583875s" podCreationTimestamp="2025-12-05 12:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:51.165869381 +0000 UTC m=+150.585936216" watchObservedRunningTime="2025-12-05 12:27:51.166583875 +0000 UTC m=+150.586650690"
Dec 05 12:27:51 crc kubenswrapper[4784]: I1205 12:27:51.184787 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-q7vnr"]
Dec 05 12:27:51 crc kubenswrapper[4784]: I1205 12:27:51.219319 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl"
Dec 05 12:27:51 crc kubenswrapper[4784]: W1205 12:27:51.241651 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcf617d33_a828_4556_b92f_90cb28dd8d8c.slice/crio-38bb1ca2ad0f1ac295395095d9589f00370c494af62695b1d0d9e3471f7f4837 WatchSource:0}: Error finding container 38bb1ca2ad0f1ac295395095d9589f00370c494af62695b1d0d9e3471f7f4837: Status 404 returned error can't find the container with id 38bb1ca2ad0f1ac295395095d9589f00370c494af62695b1d0d9e3471f7f4837
Dec 05 12:27:51 crc kubenswrapper[4784]: I1205 12:27:51.252367 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 12:27:51 crc kubenswrapper[4784]: E1205 12:27:51.252611 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed.
No retries permitted until 2025-12-05 12:27:51.752587898 +0000 UTC m=+151.172654703 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:51 crc kubenswrapper[4784]: I1205 12:27:51.253154 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:51 crc kubenswrapper[4784]: E1205 12:27:51.254860 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:27:51.754838206 +0000 UTC m=+151.174905021 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:51 crc kubenswrapper[4784]: I1205 12:27:51.261650 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2hkzk" podStartSLOduration=127.261631273 podStartE2EDuration="2m7.261631273s" podCreationTimestamp="2025-12-05 12:25:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:51.221669842 +0000 UTC m=+150.641736657" watchObservedRunningTime="2025-12-05 12:27:51.261631273 +0000 UTC m=+150.681698088" Dec 05 12:27:51 crc kubenswrapper[4784]: W1205 12:27:51.281708 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0d1f50e5_5ec7_4b19_ab3f_119001d4695c.slice/crio-9320e1a8f82a61e8de1aac90890dccf112165b7962e83d332200ba34eb5d9735 WatchSource:0}: Error finding container 9320e1a8f82a61e8de1aac90890dccf112165b7962e83d332200ba34eb5d9735: Status 404 returned error can't find the container with id 9320e1a8f82a61e8de1aac90890dccf112165b7962e83d332200ba34eb5d9735 Dec 05 12:27:51 crc kubenswrapper[4784]: I1205 12:27:51.329690 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hb74q" podStartSLOduration=127.329663909 podStartE2EDuration="2m7.329663909s" podCreationTimestamp="2025-12-05 12:25:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:51.262462501 +0000 UTC m=+150.682529326" 
watchObservedRunningTime="2025-12-05 12:27:51.329663909 +0000 UTC m=+150.749730724" Dec 05 12:27:51 crc kubenswrapper[4784]: I1205 12:27:51.353963 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:27:51 crc kubenswrapper[4784]: E1205 12:27:51.354418 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:27:51.85439704 +0000 UTC m=+151.274463855 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:51 crc kubenswrapper[4784]: I1205 12:27:51.368990 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-89565" podStartSLOduration=127.368968117 podStartE2EDuration="2m7.368968117s" podCreationTimestamp="2025-12-05 12:25:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:51.368700087 +0000 UTC m=+150.788766922" watchObservedRunningTime="2025-12-05 12:27:51.368968117 +0000 UTC m=+150.789034942" Dec 05 12:27:51 crc kubenswrapper[4784]: I1205 12:27:51.456542 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:51 crc kubenswrapper[4784]: E1205 12:27:51.457018 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:27:51.95699741 +0000 UTC m=+151.377064235 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:51 crc kubenswrapper[4784]: I1205 12:27:51.561265 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:27:51 crc kubenswrapper[4784]: E1205 12:27:51.561771 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:27:52.061752764 +0000 UTC m=+151.481819579 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:51 crc kubenswrapper[4784]: I1205 12:27:51.667762 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:51 crc kubenswrapper[4784]: E1205 12:27:51.668253 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:27:52.16824212 +0000 UTC m=+151.588308935 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:51 crc kubenswrapper[4784]: I1205 12:27:51.768782 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:27:51 crc kubenswrapper[4784]: E1205 12:27:51.778439 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:27:52.278394322 +0000 UTC m=+151.698461137 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:51 crc kubenswrapper[4784]: E1205 12:27:51.880672 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:27:52.380640029 +0000 UTC m=+151.800706844 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:51 crc kubenswrapper[4784]: I1205 12:27:51.880240 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:51 crc kubenswrapper[4784]: I1205 12:27:51.982826 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:27:51 crc kubenswrapper[4784]: E1205 12:27:51.983179 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:27:52.483161117 +0000 UTC m=+151.903227932 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.041309 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-89565" Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.041722 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-89565" Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.085505 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:52 crc kubenswrapper[4784]: E1205 12:27:52.107101 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:27:52.607075869 +0000 UTC m=+152.027142674 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.147628 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsxgh" event={"ID":"232f2d6d-4234-4868-9ede-a37034cc5d5b","Type":"ContainerStarted","Data":"4d22e44757a8d208bac629bb9a2b613b86443290e37bae143807ac171da3a8a8"} Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.159733 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-f2nct" event={"ID":"298cdb81-831b-4653-9491-b9215b59b87d","Type":"ContainerStarted","Data":"577ad42198f7da1977a1d56a3b7e868490fc27a81e34907b44592c18ed80a900"} Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.190477 4784 patch_prober.go:28] interesting pod/router-default-5444994796-wpt8s container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 12:27:52 crc kubenswrapper[4784]: [-]has-synced failed: reason withheld Dec 05 12:27:52 crc kubenswrapper[4784]: [+]process-running ok Dec 05 12:27:52 crc kubenswrapper[4784]: healthz check failed Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.190556 4784 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wpt8s" podUID="095096dd-0a14-4993-80fb-c332ae212107" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.206794 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-89565" Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.206956 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:27:52 crc kubenswrapper[4784]: E1205 12:27:52.207400 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:27:52.707383619 +0000 UTC m=+152.127450434 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.215048 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dfh8l" event={"ID":"b8c401c4-a06d-45b8-87ef-d76236d85453","Type":"ContainerStarted","Data":"99ec592ed759e21a07c18710db9bd3bcc54bb49ddd8707a669733bf6b1fbc5d5"} Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.215105 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dfh8l" event={"ID":"b8c401c4-a06d-45b8-87ef-d76236d85453","Type":"ContainerStarted","Data":"c970e2b9dfde2dec39d161fb42ffa5d73d0963f94817bf5f793967ad05197261"} Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.216065 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dfh8l" Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.249568 4784 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-dfh8l container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.33:8443/healthz\": dial tcp 10.217.0.33:8443: connect: connection refused" start-of-body= Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.249622 4784 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dfh8l" podUID="b8c401c4-a06d-45b8-87ef-d76236d85453" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.33:8443/healthz\": dial tcp 10.217.0.33:8443: connect: connection refused" Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.257974 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-q7vnr" event={"ID":"c06c3bf6-dd05-4b6a-a64a-5b92a1082c95","Type":"ContainerStarted","Data":"f6dd218efe08091ab7354c278350e23280f0b50d3c215d72952dc8f63e47ea50"} Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.297973 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-shp28" event={"ID":"4d076d7f-77aa-4e21-9189-80c39bc6147d","Type":"ContainerStarted","Data":"cfad2c0ace4a0dd15ded269def7d003f53c7a450506d52b6f346284c00473cb5"} Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.311064 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:52 crc kubenswrapper[4784]: E1205 12:27:52.311729 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-05 12:27:52.811713148 +0000 UTC m=+152.231779963 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.380899 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-g67wl" event={"ID":"b63dfa9f-c0b4-41d2-9a30-10388f0ad077","Type":"ContainerStarted","Data":"196d5a7e677ea49fcbaff62ef33da28a1fd99a5be47fc0c6d5a149236e714de6"} Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.422324 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:27:52 crc kubenswrapper[4784]: E1205 12:27:52.423556 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:27:52.92353353 +0000 UTC m=+152.343600345 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.434638 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-7t8s6" event={"ID":"c0a51b7e-45fa-4c8b-9700-0872a5f49527","Type":"ContainerStarted","Data":"0ad12bf41a83ee38647a6d11038389b4ae671f3b9884634912ee190c9ef72755"} Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.434695 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-7t8s6" event={"ID":"c0a51b7e-45fa-4c8b-9700-0872a5f49527","Type":"ContainerStarted","Data":"f9e6172c41b34b6fcf10d814576261c172095c1f8e24d9b1a4a574e40c7027be"} Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.465841 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fm4wv" event={"ID":"e0da22bc-148f-48ea-98b0-0e316a52b1a1","Type":"ContainerStarted","Data":"4f99d01b71f716e15cf76ad19d69964c3a9d1248d6386277cd03500a828cc782"} Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.485506 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rbpdj" event={"ID":"63a61829-2074-4b44-9297-6dcf6236af1a","Type":"ContainerStarted","Data":"7d06ddcbb15501bf1435fac6c5d30efcee5d73b804ce7aac0aa9126bb9263a17"} Dec 
05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.532260 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:52 crc kubenswrapper[4784]: E1205 12:27:52.534262 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:27:53.034241171 +0000 UTC m=+152.454313436 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.535999 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-txh4x" event={"ID":"e350bf27-d60f-4f5f-9bc0-460e997fed0c","Type":"ContainerStarted","Data":"c365ccb450f0939c2096353163f9f8a9b5d5407e9d58d9a8840ad27fd3fb870e"} Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.536378 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-txh4x" Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.556175 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-599vr" event={"ID":"cf617d33-a828-4556-b92f-90cb28dd8d8c","Type":"ContainerStarted","Data":"38bb1ca2ad0f1ac295395095d9589f00370c494af62695b1d0d9e3471f7f4837"} Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.565813 4784 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-txh4x container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/healthz\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.565856 4784 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-txh4x" podUID="e350bf27-d60f-4f5f-9bc0-460e997fed0c" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.30:8080/healthz\": dial tcp 10.217.0.30:8080: connect: connection refused" Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.572400 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qbc5m" event={"ID":"3edbf65e-d134-4f22-9c92-0f51e9350f05","Type":"ContainerStarted","Data":"3492dc90b2bbddc25081e8dca9b16cb4ee907a09d339382d75e3408f4b39605b"} Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.573248 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qbc5m" Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.606435 4784 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-n8hdp" event={"ID":"798fd230-84b4-4a83-98db-2e6fd780ca50","Type":"ContainerStarted","Data":"799991616a1b879bf2e8fa46c0d7f0ae4108bc1e11e752e05feae8767c6a364b"} Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.642667 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:27:52 crc kubenswrapper[4784]: E1205 12:27:52.643921 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:27:53.143898126 +0000 UTC m=+152.563964941 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.648570 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ptn7m" event={"ID":"0d1f50e5-5ec7-4b19-ab3f-119001d4695c","Type":"ContainerStarted","Data":"9320e1a8f82a61e8de1aac90890dccf112165b7962e83d332200ba34eb5d9735"} Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.660770 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-g67wl" podStartSLOduration=129.660733892 podStartE2EDuration="2m9.660733892s" podCreationTimestamp="2025-12-05 12:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:52.648977393 +0000 UTC m=+152.069044208" watchObservedRunningTime="2025-12-05 12:27:52.660733892 +0000 UTC m=+152.080800697" Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.681295 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-txh4x" podStartSLOduration=128.681278536 podStartE2EDuration="2m8.681278536s" podCreationTimestamp="2025-12-05 12:25:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:52.680303583 +0000 UTC m=+152.100370398" watchObservedRunningTime="2025-12-05 12:27:52.681278536 +0000 UTC m=+152.101345351" Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.692376 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-z2p7n" event={"ID":"99e59e8a-dbaf-4ae4-82d5-505cde15ff2b","Type":"ContainerStarted","Data":"b030a6e3a096bc54242f770a72eabc7461234e735845cc3448c3e4e2b939e990"} Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.717146 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rbpdj" podStartSLOduration=128.717128315 podStartE2EDuration="2m8.717128315s" podCreationTimestamp="2025-12-05 12:25:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:52.715615371 +0000 UTC m=+152.135682186" watchObservedRunningTime="2025-12-05 12:27:52.717128315 +0000 UTC m=+152.137195130" Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.725056 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-c9str" event={"ID":"2a6d9550-f59e-4abd-adcd-6514e8c143bc","Type":"ContainerStarted","Data":"f1b52066ddf9d113d9ac3be3e7203fcec629032813748cc6b2cef0c448c54ecf"} Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.725094 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-c9str" event={"ID":"2a6d9550-f59e-4abd-adcd-6514e8c143bc","Type":"ContainerStarted","Data":"86da6e6c7ea8d6034bf7892af73c2724b2a368a0e6ce67fb097317b7e91cf718"} Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.739587 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-l8jkp" event={"ID":"7bec0da8-c8ae-453e-95e1-060b8fb71de1","Type":"ContainerStarted","Data":"fe8d06f2280e8ac8118d47b72cc1bee27cdcc79e34c3eb61357851d3eedd5ddd"} Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.740887 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-wrh5b" event={"ID":"a23cdf56-ddf2-4914-93d8-18b0d5cdd52f","Type":"ContainerStarted","Data":"a75ea484edd9f5ec0822c0bba0048acd2bc8fd2733780a543f64d096f700d067"} Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.740907 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-wrh5b" event={"ID":"a23cdf56-ddf2-4914-93d8-18b0d5cdd52f","Type":"ContainerStarted","Data":"61232a71ddcdedee946fb2ae584f4704d6d756a151e1e7e34f169a3c1a3af52b"} Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.746053 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:52 crc kubenswrapper[4784]: E1205 12:27:52.746386 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:27:53.246373642 +0000 UTC m=+152.666440457 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.753560 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-df92s" event={"ID":"e5f2aa15-edae-4c3f-af17-1fbb714f3f53","Type":"ContainerStarted","Data":"703e68dc9f6e679070364c20542ff9b15589a6ce11da7fe659da881889bc03e4"} Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.755561 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-7t8s6" podStartSLOduration=129.755546601 podStartE2EDuration="2m9.755546601s" podCreationTimestamp="2025-12-05 12:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:52.754428922 +0000 UTC m=+152.174495737" watchObservedRunningTime="2025-12-05 12:27:52.755546601 +0000 UTC m=+152.175613416" Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.759536 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wfsr7" event={"ID":"88e9df39-e23d-4d44-9ca0-148deb5d9809","Type":"ContainerStarted","Data":"cfafaab1411d3b8be8463afd445118f427f1e6dfc0ebcc07e23314e273d87e8f"} Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.766923 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-bzfdn" event={"ID":"17e06e1c-4128-4e70-b4a7-b83685a7166a","Type":"ContainerStarted","Data":"11169b0bb0ed66920646b1ab2a0e356f81626ea1c46384e64e696fa3c44c9157"} Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.766965 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-bzfdn" event={"ID":"17e06e1c-4128-4e70-b4a7-b83685a7166a","Type":"ContainerStarted","Data":"0c57fb95dcc10ee7480d2d90b5471c4af895ec24bf4423ec9a74b391f3834526"} Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.793788 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-89565" Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.804700 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-fvbbf" Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.805277 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.861821 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:27:52 crc kubenswrapper[4784]: E1205 12:27:52.865445 4784 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:27:53.365414744 +0000 UTC m=+152.785481559 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.945717 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dfh8l" podStartSLOduration=128.945694627 podStartE2EDuration="2m8.945694627s" podCreationTimestamp="2025-12-05 12:25:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:52.897869983 +0000 UTC m=+152.317936798" watchObservedRunningTime="2025-12-05 12:27:52.945694627 +0000 UTC m=+152.365761442" Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.987963 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-z2p7n" podStartSLOduration=128.987938437 podStartE2EDuration="2m8.987938437s" podCreationTimestamp="2025-12-05 12:25:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:52.948619759 +0000 UTC m=+152.368686574" watchObservedRunningTime="2025-12-05 12:27:52.987938437 +0000 UTC m=+152.408005252" Dec 05 12:27:52 crc kubenswrapper[4784]: I1205 12:27:52.990896 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:52 crc kubenswrapper[4784]: E1205 12:27:52.991743 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:27:53.491721899 +0000 UTC m=+152.911788714 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.095937 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:27:53 crc kubenswrapper[4784]: E1205 12:27:53.097010 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:27:53.596985071 +0000 UTC m=+153.017051886 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.134057 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-n8hdp" podStartSLOduration=130.13403867 podStartE2EDuration="2m10.13403867s" podCreationTimestamp="2025-12-05 12:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:53.089037655 +0000 UTC m=+152.509104470" watchObservedRunningTime="2025-12-05 12:27:53.13403867 +0000 UTC m=+152.554105485" Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.134659 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qbc5m" podStartSLOduration=129.134651521 podStartE2EDuration="2m9.134651521s" podCreationTimestamp="2025-12-05 12:25:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:53.131643987 +0000 UTC m=+152.551710812" watchObservedRunningTime="2025-12-05 12:27:53.134651521 +0000 UTC m=+152.554718336" Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.146513 4784 patch_prober.go:28] interesting pod/router-default-5444994796-wpt8s container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 12:27:53 crc kubenswrapper[4784]: [-]has-synced failed: reason withheld Dec 05 12:27:53 crc kubenswrapper[4784]: [+]process-running ok Dec 05 12:27:53 crc kubenswrapper[4784]: healthz check failed Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.146576 4784 prober.go:107] "Probe failed" probeType="Startup" 
pod="openshift-ingress/router-default-5444994796-wpt8s" podUID="095096dd-0a14-4993-80fb-c332ae212107" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.198518 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:53 crc kubenswrapper[4784]: E1205 12:27:53.198974 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:27:53.698957099 +0000 UTC m=+153.119023914 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.213674 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-bzfdn" podStartSLOduration=129.213636319 podStartE2EDuration="2m9.213636319s" podCreationTimestamp="2025-12-05 12:25:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:53.208569623 +0000 UTC m=+152.628636458" watchObservedRunningTime="2025-12-05 12:27:53.213636319 +0000 UTC m=+152.633703134" Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.299416 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:27:53 crc kubenswrapper[4784]: E1205 12:27:53.299806 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:27:53.799787308 +0000 UTC m=+153.219854123 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.401176 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:53 crc kubenswrapper[4784]: E1205 12:27:53.401929 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:27:53.90191696 +0000 UTC m=+153.321983775 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.502776 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:27:53 crc kubenswrapper[4784]: E1205 12:27:53.502998 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:27:54.002967526 +0000 UTC m=+153.423034341 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.503269 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:53 crc kubenswrapper[4784]: E1205 12:27:53.503569 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:27:54.003557577 +0000 UTC m=+153.423624392 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.603952 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:27:53 crc kubenswrapper[4784]: E1205 12:27:53.604100 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:27:54.104080105 +0000 UTC m=+153.524146920 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.604215 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:53 crc kubenswrapper[4784]: E1205 12:27:53.604525 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:27:54.10451631 +0000 UTC m=+153.524583125 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.705223 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:27:53 crc kubenswrapper[4784]: E1205 12:27:53.705476 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:27:54.205450782 +0000 UTC m=+153.625517597 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.705529 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:53 crc kubenswrapper[4784]: E1205 12:27:53.705847 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:27:54.205833474 +0000 UTC m=+153.625900329 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.772611 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-shp28" event={"ID":"4d076d7f-77aa-4e21-9189-80c39bc6147d","Type":"ContainerStarted","Data":"acaa127cfd4810eee95e6fdc39d9f749094c037483fbe87dc9632c0b08c51a33"} Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.774071 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-l8jkp" event={"ID":"7bec0da8-c8ae-453e-95e1-060b8fb71de1","Type":"ContainerStarted","Data":"b1c0c52d7a43524ef56e0f4a5920a68d79e7593752391b0b4fbb999c632ba791"} Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.775218 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ptn7m" event={"ID":"0d1f50e5-5ec7-4b19-ab3f-119001d4695c","Type":"ContainerStarted","Data":"5b22e88999cadeac4cc387bb5f22d6aaed05aa75173631ad94d1bfa48ebc3987"} Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.776382 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-tbmvb" event={"ID":"3de2abbd-45d1-489a-8105-448180433f7d","Type":"ContainerStarted","Data":"2cccc8235ecd453173a0ecf15420da2d0a6a122300a24b1f971aaa0d210084e4"} Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.777874 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-599vr" event={"ID":"cf617d33-a828-4556-b92f-90cb28dd8d8c","Type":"ContainerStarted","Data":"e2bfb47ac9f697e6e5493574883f919e6d19c39433109e69edb30945eca250f1"} Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.779418 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-multus/multus-admission-controller-857f4d67dd-df92s" event={"ID":"e5f2aa15-edae-4c3f-af17-1fbb714f3f53","Type":"ContainerStarted","Data":"1926aed7544f3c9b1e08dfb84602b4dacc19f17eb132301c5eeb35a5b568a38b"} Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.781072 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsxgh" event={"ID":"232f2d6d-4234-4868-9ede-a37034cc5d5b","Type":"ContainerStarted","Data":"6b64b75a16eb2fa85eab91b85df6f107b5e6d455a806ec780f84d319441421f2"} Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.781884 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsxgh" Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.783372 4784 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-wsxgh container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.39:5443/healthz\": dial tcp 10.217.0.39:5443: connect: connection refused" start-of-body= Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.783416 4784 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsxgh" podUID="232f2d6d-4234-4868-9ede-a37034cc5d5b" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.39:5443/healthz\": dial tcp 10.217.0.39:5443: connect: connection refused" Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.783641 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fm4wv" event={"ID":"e0da22bc-148f-48ea-98b0-0e316a52b1a1","Type":"ContainerStarted","Data":"f80514b99aab56ca66c66e72b65eda7ff5e7fec7043e1907f0fdbf7dc7ff8270"} Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.783897 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fm4wv" Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.784842 4784 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-fm4wv container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.28:8443/healthz\": dial tcp 10.217.0.28:8443: connect: connection refused" start-of-body= Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.784870 4784 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fm4wv" podUID="e0da22bc-148f-48ea-98b0-0e316a52b1a1" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.28:8443/healthz\": dial tcp 10.217.0.28:8443: connect: connection refused" Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.786288 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-wrh5b" event={"ID":"a23cdf56-ddf2-4914-93d8-18b0d5cdd52f","Type":"ContainerStarted","Data":"24fdce0a1a85f623e53dea6f7e14c9567297c414d91c055b0ddb5b8f2d1607f8"} Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.789052 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-c9str" podStartSLOduration=130.78903719 podStartE2EDuration="2m10.78903719s" podCreationTimestamp="2025-12-05 12:25:43 +0000 UTC" firstStartedPulling="0001-01-01 
00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:53.272503748 +0000 UTC m=+152.692570563" watchObservedRunningTime="2025-12-05 12:27:53.78903719 +0000 UTC m=+153.209104015" Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.792692 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rbpdj" event={"ID":"63a61829-2074-4b44-9297-6dcf6236af1a","Type":"ContainerStarted","Data":"e152cf15d97a58be4556b69aeb1b6fee94bd39a2731a3c38e481d352b66cfb7e"} Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.795368 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-txh4x" event={"ID":"e350bf27-d60f-4f5f-9bc0-460e997fed0c","Type":"ContainerStarted","Data":"864a6fa223f0bf8039b10ef73f169f800bc5f0c4b7da73faadd496f24b37e227"} Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.796299 4784 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-txh4x container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/healthz\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.796347 4784 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-txh4x" podUID="e350bf27-d60f-4f5f-9bc0-460e997fed0c" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.30:8080/healthz\": dial tcp 10.217.0.30:8080: connect: connection refused" Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.797797 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-f2nct" event={"ID":"298cdb81-831b-4653-9491-b9215b59b87d","Type":"ContainerStarted","Data":"41bdc906210921bfe08680143ddb1669084bed2b4acf30112d7d8e2f7c0d6df5"} Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.799356 4784 generic.go:334] "Generic (PLEG): container finished" podID="88e9df39-e23d-4d44-9ca0-148deb5d9809" containerID="afebe8253e871f617647a826f1a0d2428c52f41d0f20fd019b7924b349370e53" exitCode=0 Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.799608 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wfsr7" event={"ID":"88e9df39-e23d-4d44-9ca0-148deb5d9809","Type":"ContainerDied","Data":"afebe8253e871f617647a826f1a0d2428c52f41d0f20fd019b7924b349370e53"} Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.802065 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-q7vnr" event={"ID":"c06c3bf6-dd05-4b6a-a64a-5b92a1082c95","Type":"ContainerStarted","Data":"6b31968667f9d262f34950ad4e0646ee10606bcee4850f082eb13603ddaf79ad"} Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.807829 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:27:53 crc kubenswrapper[4784]: E1205 12:27:53.807938 4784 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:27:54.307916017 +0000 UTC m=+153.727982832 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.808083 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:53 crc kubenswrapper[4784]: E1205 12:27:53.808413 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:27:54.308402114 +0000 UTC m=+153.728468929 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.812780 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-df92s" podStartSLOduration=129.812763375 podStartE2EDuration="2m9.812763375s" podCreationTimestamp="2025-12-05 12:25:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:53.812407883 +0000 UTC m=+153.232474718" watchObservedRunningTime="2025-12-05 12:27:53.812763375 +0000 UTC m=+153.232830190" Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.815178 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-shp28" podStartSLOduration=129.815161218 podStartE2EDuration="2m9.815161218s" podCreationTimestamp="2025-12-05 12:25:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:53.790887524 +0000 UTC m=+153.210954339" watchObservedRunningTime="2025-12-05 12:27:53.815161218 +0000 UTC m=+153.235228033" Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.834317 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ptn7m" podStartSLOduration=129.834279884 podStartE2EDuration="2m9.834279884s" podCreationTimestamp="2025-12-05 12:25:44 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:53.832399908 +0000 UTC m=+153.252466733" watchObservedRunningTime="2025-12-05 12:27:53.834279884 +0000 UTC m=+153.254346699" Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.853908 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fm4wv" podStartSLOduration=129.853890257 podStartE2EDuration="2m9.853890257s" podCreationTimestamp="2025-12-05 12:25:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:53.85199739 +0000 UTC m=+153.272064215" watchObservedRunningTime="2025-12-05 12:27:53.853890257 +0000 UTC m=+153.273957072" Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.870823 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-wrh5b" podStartSLOduration=129.870808085 podStartE2EDuration="2m9.870808085s" podCreationTimestamp="2025-12-05 12:25:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:53.868923689 +0000 UTC m=+153.288990504" watchObservedRunningTime="2025-12-05 12:27:53.870808085 +0000 UTC m=+153.290874900" Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.889628 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsxgh" podStartSLOduration=129.889611119 podStartE2EDuration="2m9.889611119s" podCreationTimestamp="2025-12-05 12:25:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:53.887232267 +0000 UTC m=+153.307299092" watchObservedRunningTime="2025-12-05 12:27:53.889611119 +0000 UTC m=+153.309677934" Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.909014 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:27:53 crc kubenswrapper[4784]: E1205 12:27:53.909210 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:27:54.409152089 +0000 UTC m=+153.829218904 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.912421 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:53 crc kubenswrapper[4784]: E1205 12:27:53.923324 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:27:54.423308062 +0000 UTC m=+153.843374877 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.945585 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-f2nct" podStartSLOduration=9.945565356 podStartE2EDuration="9.945565356s" podCreationTimestamp="2025-12-05 12:27:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:53.937283888 +0000 UTC m=+153.357350713" watchObservedRunningTime="2025-12-05 12:27:53.945565356 +0000 UTC m=+153.365632171" Dec 05 12:27:53 crc kubenswrapper[4784]: I1205 12:27:53.998845 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dfh8l" Dec 05 12:27:54 crc kubenswrapper[4784]: I1205 12:27:54.014455 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:27:54 crc kubenswrapper[4784]: E1205 12:27:54.014955 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:27:54.51493509 +0000 UTC m=+153.935001915 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:54 crc kubenswrapper[4784]: I1205 12:27:54.116244 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:54 crc kubenswrapper[4784]: E1205 12:27:54.116953 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:27:54.616938089 +0000 UTC m=+154.037004894 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:54 crc kubenswrapper[4784]: I1205 12:27:54.140419 4784 patch_prober.go:28] interesting pod/router-default-5444994796-wpt8s container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 12:27:54 crc kubenswrapper[4784]: [-]has-synced failed: reason withheld Dec 05 12:27:54 crc kubenswrapper[4784]: [+]process-running ok Dec 05 12:27:54 crc kubenswrapper[4784]: healthz check failed Dec 05 12:27:54 crc kubenswrapper[4784]: I1205 12:27:54.140481 4784 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wpt8s" podUID="095096dd-0a14-4993-80fb-c332ae212107" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 12:27:54 crc kubenswrapper[4784]: I1205 12:27:54.217408 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:27:54 crc kubenswrapper[4784]: E1205 12:27:54.217604 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:27:54.717576181 +0000 UTC m=+154.137642996 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:54 crc kubenswrapper[4784]: I1205 12:27:54.217741 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:54 crc kubenswrapper[4784]: E1205 12:27:54.218140 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:27:54.718131389 +0000 UTC m=+154.138198204 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:54 crc kubenswrapper[4784]: I1205 12:27:54.319592 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:27:54 crc kubenswrapper[4784]: E1205 12:27:54.319840 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:27:54.819793637 +0000 UTC m=+154.239860452 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:54 crc kubenswrapper[4784]: I1205 12:27:54.320253 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:54 crc kubenswrapper[4784]: E1205 12:27:54.320616 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:27:54.820601175 +0000 UTC m=+154.240667990 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:54 crc kubenswrapper[4784]: I1205 12:27:54.421289 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:27:54 crc kubenswrapper[4784]: E1205 12:27:54.421561 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:27:54.921528837 +0000 UTC m=+154.341595662 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:54 crc kubenswrapper[4784]: I1205 12:27:54.421637 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:54 crc kubenswrapper[4784]: E1205 12:27:54.422142 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:27:54.922121957 +0000 UTC m=+154.342188772 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:54 crc kubenswrapper[4784]: I1205 12:27:54.522840 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:27:54 crc kubenswrapper[4784]: E1205 12:27:54.523036 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:27:55.023009688 +0000 UTC m=+154.443076503 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:54 crc kubenswrapper[4784]: I1205 12:27:54.523458 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:54 crc kubenswrapper[4784]: E1205 12:27:54.524038 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:27:55.024011532 +0000 UTC m=+154.444078347 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:54 crc kubenswrapper[4784]: I1205 12:27:54.625233 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:27:54 crc kubenswrapper[4784]: E1205 12:27:54.625619 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:27:55.125591587 +0000 UTC m=+154.545658402 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:54 crc kubenswrapper[4784]: I1205 12:27:54.625853 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:54 crc kubenswrapper[4784]: E1205 12:27:54.626197 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:27:55.126177787 +0000 UTC m=+154.546244602 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:54 crc kubenswrapper[4784]: I1205 12:27:54.726899 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:27:54 crc kubenswrapper[4784]: E1205 12:27:54.727059 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:27:55.227037886 +0000 UTC m=+154.647104701 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:54 crc kubenswrapper[4784]: I1205 12:27:54.727167 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:54 crc kubenswrapper[4784]: E1205 12:27:54.727585 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:27:55.227573455 +0000 UTC m=+154.647640280 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:54 crc kubenswrapper[4784]: I1205 12:27:54.808900 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-l8jkp" event={"ID":"7bec0da8-c8ae-453e-95e1-060b8fb71de1","Type":"ContainerStarted","Data":"9afeecf3a97a138f0be6970f1581d647b89ef121054c60d8e2ca3727d6f6a16f"} Dec 05 12:27:54 crc kubenswrapper[4784]: I1205 12:27:54.809964 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-l8jkp" Dec 05 12:27:54 crc kubenswrapper[4784]: I1205 12:27:54.811453 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-tbmvb" event={"ID":"3de2abbd-45d1-489a-8105-448180433f7d","Type":"ContainerStarted","Data":"cba14be6d2ed3b7ab021639451a04563b57e148802774ac36f87231efd23c543"} Dec 05 12:27:54 crc kubenswrapper[4784]: I1205 12:27:54.813026 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-599vr" event={"ID":"cf617d33-a828-4556-b92f-90cb28dd8d8c","Type":"ContainerStarted","Data":"4c1ee9dc9b06e1c46289be738efde7779e3b0d7c7ee6aa187813125f9dc22903"} Dec 05 12:27:54 crc kubenswrapper[4784]: I1205 12:27:54.815783 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-q7vnr" event={"ID":"c06c3bf6-dd05-4b6a-a64a-5b92a1082c95","Type":"ContainerStarted","Data":"36820087dfcb2b58e3a6f8499c3dbcf4df88a1e6803937a7538dd36fed117b2c"} Dec 05 12:27:54 crc kubenswrapper[4784]: I1205 12:27:54.819235 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wfsr7" 
event={"ID":"88e9df39-e23d-4d44-9ca0-148deb5d9809","Type":"ContainerStarted","Data":"7ab8a2454dfd1d6c2a5492796f344233669a8ed21b72b2515135e196540e0c8c"} Dec 05 12:27:54 crc kubenswrapper[4784]: I1205 12:27:54.820326 4784 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-txh4x container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/healthz\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Dec 05 12:27:54 crc kubenswrapper[4784]: I1205 12:27:54.821123 4784 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-txh4x" podUID="e350bf27-d60f-4f5f-9bc0-460e997fed0c" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.30:8080/healthz\": dial tcp 10.217.0.30:8080: connect: connection refused" Dec 05 12:27:54 crc kubenswrapper[4784]: I1205 12:27:54.825364 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fm4wv" Dec 05 12:27:54 crc kubenswrapper[4784]: I1205 12:27:54.828119 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:27:54 crc kubenswrapper[4784]: E1205 12:27:54.828551 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:27:55.328533077 +0000 UTC m=+154.748599892 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:54 crc kubenswrapper[4784]: I1205 12:27:54.858293 4784 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Dec 05 12:27:54 crc kubenswrapper[4784]: I1205 12:27:54.892850 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-l8jkp" podStartSLOduration=10.892832404 podStartE2EDuration="10.892832404s" podCreationTimestamp="2025-12-05 12:27:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:54.847521009 +0000 UTC m=+154.267587824" watchObservedRunningTime="2025-12-05 12:27:54.892832404 +0000 UTC m=+154.312899219" Dec 05 12:27:54 crc kubenswrapper[4784]: I1205 12:27:54.893825 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-599vr" podStartSLOduration=130.893818989 podStartE2EDuration="2m10.893818989s" podCreationTimestamp="2025-12-05 12:25:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:54.888636009 +0000 UTC m=+154.308702824" watchObservedRunningTime="2025-12-05 12:27:54.893818989 +0000 UTC m=+154.313885804" Dec 05 12:27:54 crc kubenswrapper[4784]: I1205 12:27:54.929551 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:54 crc kubenswrapper[4784]: E1205 12:27:54.933636 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:27:55.433617534 +0000 UTC m=+154.853684429 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:55 crc kubenswrapper[4784]: I1205 12:27:55.031639 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:27:55 crc kubenswrapper[4784]: E1205 12:27:55.031967 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 12:27:55.531949915 +0000 UTC m=+154.952016730 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:55 crc kubenswrapper[4784]: I1205 12:27:55.078498 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-q7vnr" podStartSLOduration=131.078476944 podStartE2EDuration="2m11.078476944s" podCreationTimestamp="2025-12-05 12:25:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:55.077981376 +0000 UTC m=+154.498048191" watchObservedRunningTime="2025-12-05 12:27:55.078476944 +0000 UTC m=+154.498543759" Dec 05 12:27:55 crc kubenswrapper[4784]: I1205 12:27:55.079967 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wfsr7" podStartSLOduration=132.079960466 podStartE2EDuration="2m12.079960466s" podCreationTimestamp="2025-12-05 12:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:55.027608834 +0000 UTC m=+154.447675659" watchObservedRunningTime="2025-12-05 12:27:55.079960466 +0000 UTC m=+154.500027291" Dec 05 12:27:55 crc kubenswrapper[4784]: I1205 12:27:55.136122 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:27:55 crc kubenswrapper[4784]: E1205 12:27:55.136494 4784 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 12:27:55.636479282 +0000 UTC m=+155.056546097 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zjhhs" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 12:27:55 crc kubenswrapper[4784]: I1205 12:27:55.140472 4784 patch_prober.go:28] interesting pod/router-default-5444994796-wpt8s container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 12:27:55 crc kubenswrapper[4784]: [-]has-synced failed: reason withheld Dec 05 12:27:55 crc kubenswrapper[4784]: [+]process-running ok Dec 05 12:27:55 crc kubenswrapper[4784]: healthz check failed Dec 05 12:27:55 crc kubenswrapper[4784]: I1205 12:27:55.140541 4784 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wpt8s" podUID="095096dd-0a14-4993-80fb-c332ae212107" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 12:27:55 crc kubenswrapper[4784]: I1205 12:27:55.165652 4784 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-05T12:27:54.858318774Z","Handler":null,"Name":""} Dec 05 12:27:55 crc kubenswrapper[4784]: I1205 12:27:55.195602 4784 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Dec 05 12:27:55 crc kubenswrapper[4784]: I1205 12:27:55.195699 4784 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Dec 05 12:27:55 crc kubenswrapper[4784]: I1205 12:27:55.237791 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 12:27:55 crc kubenswrapper[4784]: I1205 12:27:55.286413 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue ""
Dec 05 12:27:55 crc kubenswrapper[4784]: I1205 12:27:55.339768 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs"
Dec 05 12:27:55 crc kubenswrapper[4784]: I1205 12:27:55.370620 4784 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Dec 05 12:27:55 crc kubenswrapper[4784]: I1205 12:27:55.370693 4784 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs"
Dec 05 12:27:55 crc kubenswrapper[4784]: I1205 12:27:55.471209 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zjhhs\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs"
Dec 05 12:27:55 crc kubenswrapper[4784]: I1205 12:27:55.648555 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsxgh"
Dec 05 12:27:55 crc kubenswrapper[4784]: I1205 12:27:55.731421 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs"
Dec 05 12:27:55 crc kubenswrapper[4784]: I1205 12:27:55.838634 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-tbmvb" event={"ID":"3de2abbd-45d1-489a-8105-448180433f7d","Type":"ContainerStarted","Data":"3793f45ea52e70e5aea41b9dc23a893e0b75094435a88f09b904c5923f0d65c2"}
Dec 05 12:27:55 crc kubenswrapper[4784]: I1205 12:27:55.838950 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-tbmvb" event={"ID":"3de2abbd-45d1-489a-8105-448180433f7d","Type":"ContainerStarted","Data":"ac998a1f43acb5a4422f099e21020b321d75e12930ac121dfb45f7f77fb5460c"}
Dec 05 12:27:55 crc kubenswrapper[4784]: I1205 12:27:55.839895 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wfsr7"
Dec 05 12:27:55 crc kubenswrapper[4784]: I1205 12:27:55.867784 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-tbmvb" podStartSLOduration=10.867761796 podStartE2EDuration="10.867761796s" podCreationTimestamp="2025-12-05 12:27:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:55.867340281 +0000 UTC m=+155.287407116" watchObservedRunningTime="2025-12-05 12:27:55.867761796 +0000 UTC m=+155.287828611"
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.041501 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-zjhhs"]
Dec 05 12:27:56 crc kubenswrapper[4784]: W1205 12:27:56.050268 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podec9447bc_e76f_4943_9f80_f4d121ff1322.slice/crio-fa2a62ee2ef217b79e7e8af7cbe33c6596068dcd54315ff5f040c752e1577bdf WatchSource:0}: Error finding container fa2a62ee2ef217b79e7e8af7cbe33c6596068dcd54315ff5f040c752e1577bdf: Status 404 returned error can't find the container with id fa2a62ee2ef217b79e7e8af7cbe33c6596068dcd54315ff5f040c752e1577bdf
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.132958 4784 patch_prober.go:28] interesting pod/router-default-5444994796-wpt8s container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 05 12:27:56 crc kubenswrapper[4784]: [-]has-synced failed: reason withheld
Dec 05 12:27:56 crc kubenswrapper[4784]: [+]process-running ok
Dec 05 12:27:56 crc kubenswrapper[4784]: healthz check failed
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.133620 4784 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wpt8s" podUID="095096dd-0a14-4993-80fb-c332ae212107" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.244595 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.245544 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.247779 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n"
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.248430 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt"
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.261667 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.371486 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/08d4812d-b79e-42fb-9400-5704bbce87c5-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"08d4812d-b79e-42fb-9400-5704bbce87c5\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.371544 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/08d4812d-b79e-42fb-9400-5704bbce87c5-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"08d4812d-b79e-42fb-9400-5704bbce87c5\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.399939 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-sf2pd"]
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.401131 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-sf2pd"
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.403064 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.415130 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-sf2pd"]
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.473206 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2lxnn\" (UniqueName: \"kubernetes.io/projected/be2166c8-c56d-46bd-ac93-e6eeb11ecba6-kube-api-access-2lxnn\") pod \"certified-operators-sf2pd\" (UID: \"be2166c8-c56d-46bd-ac93-e6eeb11ecba6\") " pod="openshift-marketplace/certified-operators-sf2pd"
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.473256 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/be2166c8-c56d-46bd-ac93-e6eeb11ecba6-catalog-content\") pod \"certified-operators-sf2pd\" (UID: \"be2166c8-c56d-46bd-ac93-e6eeb11ecba6\") " pod="openshift-marketplace/certified-operators-sf2pd"
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.473408 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/08d4812d-b79e-42fb-9400-5704bbce87c5-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"08d4812d-b79e-42fb-9400-5704bbce87c5\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.473454 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/be2166c8-c56d-46bd-ac93-e6eeb11ecba6-utilities\") pod \"certified-operators-sf2pd\" (UID: \"be2166c8-c56d-46bd-ac93-e6eeb11ecba6\") " pod="openshift-marketplace/certified-operators-sf2pd"
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.473476 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/08d4812d-b79e-42fb-9400-5704bbce87c5-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"08d4812d-b79e-42fb-9400-5704bbce87c5\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.473553 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/08d4812d-b79e-42fb-9400-5704bbce87c5-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"08d4812d-b79e-42fb-9400-5704bbce87c5\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.496124 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/08d4812d-b79e-42fb-9400-5704bbce87c5-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"08d4812d-b79e-42fb-9400-5704bbce87c5\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.560107 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.576845 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/be2166c8-c56d-46bd-ac93-e6eeb11ecba6-utilities\") pod \"certified-operators-sf2pd\" (UID: \"be2166c8-c56d-46bd-ac93-e6eeb11ecba6\") " pod="openshift-marketplace/certified-operators-sf2pd"
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.576909 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2lxnn\" (UniqueName: \"kubernetes.io/projected/be2166c8-c56d-46bd-ac93-e6eeb11ecba6-kube-api-access-2lxnn\") pod \"certified-operators-sf2pd\" (UID: \"be2166c8-c56d-46bd-ac93-e6eeb11ecba6\") " pod="openshift-marketplace/certified-operators-sf2pd"
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.576928 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/be2166c8-c56d-46bd-ac93-e6eeb11ecba6-catalog-content\") pod \"certified-operators-sf2pd\" (UID: \"be2166c8-c56d-46bd-ac93-e6eeb11ecba6\") " pod="openshift-marketplace/certified-operators-sf2pd"
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.577591 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/be2166c8-c56d-46bd-ac93-e6eeb11ecba6-utilities\") pod \"certified-operators-sf2pd\" (UID: \"be2166c8-c56d-46bd-ac93-e6eeb11ecba6\") " pod="openshift-marketplace/certified-operators-sf2pd"
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.577617 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/be2166c8-c56d-46bd-ac93-e6eeb11ecba6-catalog-content\") pod \"certified-operators-sf2pd\" (UID: \"be2166c8-c56d-46bd-ac93-e6eeb11ecba6\") " pod="openshift-marketplace/certified-operators-sf2pd"
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.582871 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-pd864"]
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.584300 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pd864"
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.589430 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.597314 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pd864"]
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.600076 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2lxnn\" (UniqueName: \"kubernetes.io/projected/be2166c8-c56d-46bd-ac93-e6eeb11ecba6-kube-api-access-2lxnn\") pod \"certified-operators-sf2pd\" (UID: \"be2166c8-c56d-46bd-ac93-e6eeb11ecba6\") " pod="openshift-marketplace/certified-operators-sf2pd"
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.678380 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f6a64333-7a87-462a-996f-b3ce85e43c8f-utilities\") pod \"community-operators-pd864\" (UID: \"f6a64333-7a87-462a-996f-b3ce85e43c8f\") " pod="openshift-marketplace/community-operators-pd864"
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.678618 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f6a64333-7a87-462a-996f-b3ce85e43c8f-catalog-content\") pod \"community-operators-pd864\" (UID: \"f6a64333-7a87-462a-996f-b3ce85e43c8f\") " pod="openshift-marketplace/community-operators-pd864"
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.678742 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7f4gv\" (UniqueName: \"kubernetes.io/projected/f6a64333-7a87-462a-996f-b3ce85e43c8f-kube-api-access-7f4gv\") pod \"community-operators-pd864\" (UID: \"f6a64333-7a87-462a-996f-b3ce85e43c8f\") " pod="openshift-marketplace/community-operators-pd864"
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.738751 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-sf2pd"
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.780098 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7f4gv\" (UniqueName: \"kubernetes.io/projected/f6a64333-7a87-462a-996f-b3ce85e43c8f-kube-api-access-7f4gv\") pod \"community-operators-pd864\" (UID: \"f6a64333-7a87-462a-996f-b3ce85e43c8f\") " pod="openshift-marketplace/community-operators-pd864"
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.780218 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f6a64333-7a87-462a-996f-b3ce85e43c8f-utilities\") pod \"community-operators-pd864\" (UID: \"f6a64333-7a87-462a-996f-b3ce85e43c8f\") " pod="openshift-marketplace/community-operators-pd864"
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.780281 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f6a64333-7a87-462a-996f-b3ce85e43c8f-catalog-content\") pod \"community-operators-pd864\" (UID: \"f6a64333-7a87-462a-996f-b3ce85e43c8f\") " pod="openshift-marketplace/community-operators-pd864"
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.780924 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f6a64333-7a87-462a-996f-b3ce85e43c8f-catalog-content\") pod \"community-operators-pd864\" (UID: \"f6a64333-7a87-462a-996f-b3ce85e43c8f\") " pod="openshift-marketplace/community-operators-pd864"
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.780933 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f6a64333-7a87-462a-996f-b3ce85e43c8f-utilities\") pod \"community-operators-pd864\" (UID: \"f6a64333-7a87-462a-996f-b3ce85e43c8f\") " pod="openshift-marketplace/community-operators-pd864"
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.781964 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-787vm"]
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.785330 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-787vm"
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.801147 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7f4gv\" (UniqueName: \"kubernetes.io/projected/f6a64333-7a87-462a-996f-b3ce85e43c8f-kube-api-access-7f4gv\") pod \"community-operators-pd864\" (UID: \"f6a64333-7a87-462a-996f-b3ce85e43c8f\") " pod="openshift-marketplace/community-operators-pd864"
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.807999 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-787vm"]
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.827107 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.848227 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"08d4812d-b79e-42fb-9400-5704bbce87c5","Type":"ContainerStarted","Data":"c03446f367c928a8633d5e543735fa5498c97a9a944e79a5703ae6f708f1416e"}
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.849709 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" event={"ID":"ec9447bc-e76f-4943-9f80-f4d121ff1322","Type":"ContainerStarted","Data":"0f4e37afc3a14cf3a142b9d7f3016a0ec674af1890c074cac9741b898b703233"}
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.849729 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" event={"ID":"ec9447bc-e76f-4943-9f80-f4d121ff1322","Type":"ContainerStarted","Data":"fa2a62ee2ef217b79e7e8af7cbe33c6596068dcd54315ff5f040c752e1577bdf"}
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.850481 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs"
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.852884 4784 generic.go:334] "Generic (PLEG): container finished" podID="c0a51b7e-45fa-4c8b-9700-0872a5f49527" containerID="0ad12bf41a83ee38647a6d11038389b4ae671f3b9884634912ee190c9ef72755" exitCode=0
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.853393 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-7t8s6" event={"ID":"c0a51b7e-45fa-4c8b-9700-0872a5f49527","Type":"ContainerDied","Data":"0ad12bf41a83ee38647a6d11038389b4ae671f3b9884634912ee190c9ef72755"}
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.878178 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" podStartSLOduration=133.878162691 podStartE2EDuration="2m13.878162691s" podCreationTimestamp="2025-12-05 12:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:27:56.877000201 +0000 UTC m=+156.297067016" watchObservedRunningTime="2025-12-05 12:27:56.878162691 +0000 UTC m=+156.298229506"
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.942113 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pd864"
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.965168 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-sf2pd"]
Dec 05 12:27:56 crc kubenswrapper[4784]: W1205 12:27:56.984235 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbe2166c8_c56d_46bd_ac93_e6eeb11ecba6.slice/crio-29bd7aaacf9624a49d7f199b85e377c52bff4918f7d9f7818a8991a0b014fad9 WatchSource:0}: Error finding container 29bd7aaacf9624a49d7f199b85e377c52bff4918f7d9f7818a8991a0b014fad9: Status 404 returned error can't find the container with id 29bd7aaacf9624a49d7f199b85e377c52bff4918f7d9f7818a8991a0b014fad9
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.985028 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ml2jk\" (UniqueName: \"kubernetes.io/projected/e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8-kube-api-access-ml2jk\") pod \"certified-operators-787vm\" (UID: \"e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8\") " pod="openshift-marketplace/certified-operators-787vm"
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.985086 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8-catalog-content\") pod \"certified-operators-787vm\" (UID: \"e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8\") " pod="openshift-marketplace/certified-operators-787vm"
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.985128 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8-utilities\") pod \"certified-operators-787vm\" (UID: \"e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8\") " pod="openshift-marketplace/certified-operators-787vm"
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.991617 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-859cx"]
Dec 05 12:27:56 crc kubenswrapper[4784]: I1205 12:27:56.992806 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-859cx"
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.011473 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes"
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.015834 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-859cx"]
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.026858 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-g67wl"
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.028132 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-g67wl"
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.034934 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-g67wl"
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.087474 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ml2jk\" (UniqueName: \"kubernetes.io/projected/e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8-kube-api-access-ml2jk\") pod \"certified-operators-787vm\" (UID: \"e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8\") " pod="openshift-marketplace/certified-operators-787vm"
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.087579 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ef4cc8a-191e-4e86-9c66-77bdaeca8015-utilities\") pod \"community-operators-859cx\" (UID: \"1ef4cc8a-191e-4e86-9c66-77bdaeca8015\") " pod="openshift-marketplace/community-operators-859cx"
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.087612 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8-catalog-content\") pod \"certified-operators-787vm\" (UID: \"e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8\") " pod="openshift-marketplace/certified-operators-787vm"
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.087645 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8-utilities\") pod \"certified-operators-787vm\" (UID: \"e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8\") " pod="openshift-marketplace/certified-operators-787vm"
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.087679 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ef4cc8a-191e-4e86-9c66-77bdaeca8015-catalog-content\") pod \"community-operators-859cx\" (UID: \"1ef4cc8a-191e-4e86-9c66-77bdaeca8015\") " pod="openshift-marketplace/community-operators-859cx"
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.087767 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tj8js\" (UniqueName: \"kubernetes.io/projected/1ef4cc8a-191e-4e86-9c66-77bdaeca8015-kube-api-access-tj8js\") pod \"community-operators-859cx\" (UID: \"1ef4cc8a-191e-4e86-9c66-77bdaeca8015\") " pod="openshift-marketplace/community-operators-859cx"
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.088791 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8-catalog-content\") pod \"certified-operators-787vm\" (UID: \"e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8\") " pod="openshift-marketplace/certified-operators-787vm"
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.088888 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8-utilities\") pod \"certified-operators-787vm\" (UID: \"e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8\") " pod="openshift-marketplace/certified-operators-787vm"
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.112440 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ml2jk\" (UniqueName: \"kubernetes.io/projected/e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8-kube-api-access-ml2jk\") pod \"certified-operators-787vm\" (UID: \"e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8\") " pod="openshift-marketplace/certified-operators-787vm"
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.128483 4784 patch_prober.go:28] interesting pod/downloads-7954f5f757-k7w2s container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body=
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.128532 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-k7w2s" podUID="30b70430-7471-4b38-a1a2-22d557f5e1ca" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused"
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.128681 4784 patch_prober.go:28] interesting pod/downloads-7954f5f757-k7w2s container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body=
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.128749 4784 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-k7w2s" podUID="30b70430-7471-4b38-a1a2-22d557f5e1ca" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused"
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.129034 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-787vm"
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.132107 4784 patch_prober.go:28] interesting pod/router-default-5444994796-wpt8s container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 05 12:27:57 crc kubenswrapper[4784]: [-]has-synced failed: reason withheld
Dec 05 12:27:57 crc kubenswrapper[4784]: [+]process-running ok
Dec 05 12:27:57 crc kubenswrapper[4784]: healthz check failed
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.132154 4784 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wpt8s" podUID="095096dd-0a14-4993-80fb-c332ae212107" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.188762 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tj8js\" (UniqueName: \"kubernetes.io/projected/1ef4cc8a-191e-4e86-9c66-77bdaeca8015-kube-api-access-tj8js\") pod \"community-operators-859cx\" (UID: \"1ef4cc8a-191e-4e86-9c66-77bdaeca8015\") " pod="openshift-marketplace/community-operators-859cx"
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.188817 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ef4cc8a-191e-4e86-9c66-77bdaeca8015-utilities\") pod \"community-operators-859cx\" (UID: \"1ef4cc8a-191e-4e86-9c66-77bdaeca8015\") " pod="openshift-marketplace/community-operators-859cx"
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.188866 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ef4cc8a-191e-4e86-9c66-77bdaeca8015-catalog-content\") pod \"community-operators-859cx\" (UID: \"1ef4cc8a-191e-4e86-9c66-77bdaeca8015\") " pod="openshift-marketplace/community-operators-859cx"
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.190636 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ef4cc8a-191e-4e86-9c66-77bdaeca8015-utilities\") pod \"community-operators-859cx\" (UID: \"1ef4cc8a-191e-4e86-9c66-77bdaeca8015\") " pod="openshift-marketplace/community-operators-859cx"
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.190763 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ef4cc8a-191e-4e86-9c66-77bdaeca8015-catalog-content\") pod \"community-operators-859cx\" (UID: \"1ef4cc8a-191e-4e86-9c66-77bdaeca8015\") " pod="openshift-marketplace/community-operators-859cx"
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.215286 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tj8js\" (UniqueName: \"kubernetes.io/projected/1ef4cc8a-191e-4e86-9c66-77bdaeca8015-kube-api-access-tj8js\") pod \"community-operators-859cx\" (UID: \"1ef4cc8a-191e-4e86-9c66-77bdaeca8015\") " pod="openshift-marketplace/community-operators-859cx"
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.223787 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pd864"]
Dec 05 12:27:57 crc kubenswrapper[4784]: W1205 12:27:57.283397 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf6a64333_7a87_462a_996f_b3ce85e43c8f.slice/crio-13e30e863f50f2833454151cc62d9d7b67fb7746ab8743228497f73d09ac8e07 WatchSource:0}: Error finding container 13e30e863f50f2833454151cc62d9d7b67fb7746ab8743228497f73d09ac8e07: Status 404 returned error can't find the container with id 13e30e863f50f2833454151cc62d9d7b67fb7746ab8743228497f73d09ac8e07
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.306924 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-m7m44"
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.306982 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-m7m44"
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.313256 4784 patch_prober.go:28] interesting pod/console-f9d7485db-m7m44 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.12:8443/health\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body=
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.313313 4784 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-m7m44" podUID="663c4c9a-9738-4c49-9199-d2a18cd6d4be" containerName="console" probeResult="failure" output="Get \"https://10.217.0.12:8443/health\": dial tcp 10.217.0.12:8443: connect: connection refused"
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.320876 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-859cx"
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.368461 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-787vm"]
Dec 05 12:27:57 crc kubenswrapper[4784]: W1205 12:27:57.378033 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode73d649d_8c9a_48e9_b5e3_b0d5108a1cb8.slice/crio-79e0034c0288b83f255e99a2b16509bab6cd20ef90cc9b875878cce41e0a4a1b WatchSource:0}: Error finding container 79e0034c0288b83f255e99a2b16509bab6cd20ef90cc9b875878cce41e0a4a1b: Status 404 returned error can't find the container with id 79e0034c0288b83f255e99a2b16509bab6cd20ef90cc9b875878cce41e0a4a1b
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.573554 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-859cx"]
Dec 05 12:27:57 crc kubenswrapper[4784]: W1205 12:27:57.640431 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1ef4cc8a_191e_4e86_9c66_77bdaeca8015.slice/crio-c1c6df859a67784e946fa7bb46b83711054eee0b89f606efda1a238cac02bcfb WatchSource:0}: Error finding container c1c6df859a67784e946fa7bb46b83711054eee0b89f606efda1a238cac02bcfb: Status 404 returned error can't find the container with id c1c6df859a67784e946fa7bb46b83711054eee0b89f606efda1a238cac02bcfb
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.867560 4784 generic.go:334] "Generic (PLEG): container finished" podID="1ef4cc8a-191e-4e86-9c66-77bdaeca8015" containerID="7ad4df051fbef0e385c842f16be985a7cab5b3f8fe70a93f28eb6f709d5d5c67" exitCode=0
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.867678 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-859cx" event={"ID":"1ef4cc8a-191e-4e86-9c66-77bdaeca8015","Type":"ContainerDied","Data":"7ad4df051fbef0e385c842f16be985a7cab5b3f8fe70a93f28eb6f709d5d5c67"}
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.867748 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-859cx" event={"ID":"1ef4cc8a-191e-4e86-9c66-77bdaeca8015","Type":"ContainerStarted","Data":"c1c6df859a67784e946fa7bb46b83711054eee0b89f606efda1a238cac02bcfb"}
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.869647 4784 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.870366 4784 generic.go:334] "Generic (PLEG): container finished" podID="e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8" containerID="711e8408b9095c73eb740204557a1fd10b7364c23c85d7c08cd4cc93476b63b1" exitCode=0
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.870440 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-787vm" event={"ID":"e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8","Type":"ContainerDied","Data":"711e8408b9095c73eb740204557a1fd10b7364c23c85d7c08cd4cc93476b63b1"}
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.870469 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-787vm" event={"ID":"e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8","Type":"ContainerStarted","Data":"79e0034c0288b83f255e99a2b16509bab6cd20ef90cc9b875878cce41e0a4a1b"}
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.873403 4784 generic.go:334] "Generic (PLEG): container finished" podID="f6a64333-7a87-462a-996f-b3ce85e43c8f" containerID="a1ca080e06c90d50029e3e053bb842439e8a65280503413377d5f8cf766f9fa6" exitCode=0
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.873471 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pd864" event={"ID":"f6a64333-7a87-462a-996f-b3ce85e43c8f","Type":"ContainerDied","Data":"a1ca080e06c90d50029e3e053bb842439e8a65280503413377d5f8cf766f9fa6"}
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.873499 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pd864" event={"ID":"f6a64333-7a87-462a-996f-b3ce85e43c8f","Type":"ContainerStarted","Data":"13e30e863f50f2833454151cc62d9d7b67fb7746ab8743228497f73d09ac8e07"}
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.875943 4784 generic.go:334] "Generic (PLEG): container finished" podID="be2166c8-c56d-46bd-ac93-e6eeb11ecba6" containerID="890a75c9b5d19011f1b69920373134a1e5a03653c203c7053b464e4fb7e0d306" exitCode=0
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.876074 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sf2pd" event={"ID":"be2166c8-c56d-46bd-ac93-e6eeb11ecba6","Type":"ContainerDied","Data":"890a75c9b5d19011f1b69920373134a1e5a03653c203c7053b464e4fb7e0d306"}
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.876114 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sf2pd" event={"ID":"be2166c8-c56d-46bd-ac93-e6eeb11ecba6","Type":"ContainerStarted","Data":"29bd7aaacf9624a49d7f199b85e377c52bff4918f7d9f7818a8991a0b014fad9"}
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.879412 4784 generic.go:334] "Generic (PLEG): container finished" podID="08d4812d-b79e-42fb-9400-5704bbce87c5" containerID="c4f6aedbb43561b6a2b76074a97530bb9e88844789df020292cc312728034ea9" exitCode=0
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.879679 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"08d4812d-b79e-42fb-9400-5704bbce87c5","Type":"ContainerDied","Data":"c4f6aedbb43561b6a2b76074a97530bb9e88844789df020292cc312728034ea9"}
Dec 05 12:27:57 crc kubenswrapper[4784]: I1205 12:27:57.889496 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-g67wl"
Dec 05 12:27:58 crc kubenswrapper[4784]: I1205 12:27:58.060846 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-txh4x"
Dec 05 12:27:58 crc kubenswrapper[4784]: I1205 12:27:58.128587 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-wpt8s"
Dec 05 12:27:58 crc kubenswrapper[4784]: I1205 12:27:58.131244 4784 patch_prober.go:28] interesting pod/router-default-5444994796-wpt8s container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 05 12:27:58 crc kubenswrapper[4784]: [-]has-synced failed: reason withheld
Dec 05 12:27:58 crc kubenswrapper[4784]: [+]process-running ok
Dec 05 12:27:58 crc kubenswrapper[4784]: healthz check failed
Dec 05 12:27:58 crc kubenswrapper[4784]: I1205 12:27:58.131278 4784 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wpt8s" podUID="095096dd-0a14-4993-80fb-c332ae212107" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 05 12:27:58 crc kubenswrapper[4784]: I1205 12:27:58.183843 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-7t8s6"
Dec 05 12:27:58 crc kubenswrapper[4784]: I1205 12:27:58.312867 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c0a51b7e-45fa-4c8b-9700-0872a5f49527-config-volume\") pod \"c0a51b7e-45fa-4c8b-9700-0872a5f49527\" (UID: \"c0a51b7e-45fa-4c8b-9700-0872a5f49527\") "
Dec 05 12:27:58 crc kubenswrapper[4784]: I1205 12:27:58.313260 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c0a51b7e-45fa-4c8b-9700-0872a5f49527-secret-volume\") pod \"c0a51b7e-45fa-4c8b-9700-0872a5f49527\" (UID: \"c0a51b7e-45fa-4c8b-9700-0872a5f49527\") "
Dec 05 12:27:58 crc kubenswrapper[4784]: I1205 12:27:58.313368 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqb85\" (UniqueName: \"kubernetes.io/projected/c0a51b7e-45fa-4c8b-9700-0872a5f49527-kube-api-access-fqb85\") pod \"c0a51b7e-45fa-4c8b-9700-0872a5f49527\" (UID: \"c0a51b7e-45fa-4c8b-9700-0872a5f49527\") "
Dec 05 12:27:58 crc kubenswrapper[4784]: I1205 12:27:58.313679 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c0a51b7e-45fa-4c8b-9700-0872a5f49527-config-volume" (OuterVolumeSpecName: "config-volume") pod "c0a51b7e-45fa-4c8b-9700-0872a5f49527" (UID: "c0a51b7e-45fa-4c8b-9700-0872a5f49527"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:27:58 crc kubenswrapper[4784]: I1205 12:27:58.320045 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0a51b7e-45fa-4c8b-9700-0872a5f49527-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "c0a51b7e-45fa-4c8b-9700-0872a5f49527" (UID: "c0a51b7e-45fa-4c8b-9700-0872a5f49527"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:27:58 crc kubenswrapper[4784]: I1205 12:27:58.320558 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0a51b7e-45fa-4c8b-9700-0872a5f49527-kube-api-access-fqb85" (OuterVolumeSpecName: "kube-api-access-fqb85") pod "c0a51b7e-45fa-4c8b-9700-0872a5f49527" (UID: "c0a51b7e-45fa-4c8b-9700-0872a5f49527"). InnerVolumeSpecName "kube-api-access-fqb85". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:27:58 crc kubenswrapper[4784]: I1205 12:27:58.381449 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-hkkm9"]
Dec 05 12:27:58 crc kubenswrapper[4784]: E1205 12:27:58.381650 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0a51b7e-45fa-4c8b-9700-0872a5f49527" containerName="collect-profiles"
Dec 05 12:27:58 crc kubenswrapper[4784]: I1205 12:27:58.381662 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0a51b7e-45fa-4c8b-9700-0872a5f49527" containerName="collect-profiles"
Dec 05 12:27:58 crc kubenswrapper[4784]: I1205 12:27:58.381760 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0a51b7e-45fa-4c8b-9700-0872a5f49527" containerName="collect-profiles"
Dec 05 12:27:58 crc kubenswrapper[4784]: I1205 12:27:58.382509 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hkkm9"
Dec 05 12:27:58 crc kubenswrapper[4784]: I1205 12:27:58.388356 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Dec 05 12:27:58 crc kubenswrapper[4784]: I1205 12:27:58.413175 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hkkm9"]
Dec 05 12:27:58 crc kubenswrapper[4784]: I1205 12:27:58.415029 4784 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c0a51b7e-45fa-4c8b-9700-0872a5f49527-config-volume\") on node \"crc\" DevicePath \"\""
Dec 05 12:27:58 crc kubenswrapper[4784]: I1205 12:27:58.415053 4784 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c0a51b7e-45fa-4c8b-9700-0872a5f49527-secret-volume\") on node \"crc\" DevicePath \"\""
Dec 05 12:27:58 crc kubenswrapper[4784]: I1205 12:27:58.415066 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqb85\" (UniqueName: \"kubernetes.io/projected/c0a51b7e-45fa-4c8b-9700-0872a5f49527-kube-api-access-fqb85\") on node \"crc\" DevicePath \"\""
Dec 05 12:27:58 crc kubenswrapper[4784]: I1205 12:27:58.516857 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6d81f1d0-3e85-443a-a738-2e0d9302d327-utilities\") pod \"redhat-marketplace-hkkm9\" (UID: \"6d81f1d0-3e85-443a-a738-2e0d9302d327\") " pod="openshift-marketplace/redhat-marketplace-hkkm9"
Dec 05 12:27:58 crc kubenswrapper[4784]: I1205 12:27:58.516979 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6d81f1d0-3e85-443a-a738-2e0d9302d327-catalog-content\") pod \"redhat-marketplace-hkkm9\" (UID: \"6d81f1d0-3e85-443a-a738-2e0d9302d327\") " pod="openshift-marketplace/redhat-marketplace-hkkm9"
Dec 05 12:27:58 crc kubenswrapper[4784]: I1205 12:27:58.517006 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxshj\" (UniqueName: \"kubernetes.io/projected/6d81f1d0-3e85-443a-a738-2e0d9302d327-kube-api-access-pxshj\") pod \"redhat-marketplace-hkkm9\" (UID: \"6d81f1d0-3e85-443a-a738-2e0d9302d327\") " pod="openshift-marketplace/redhat-marketplace-hkkm9"
Dec 05 12:27:58 crc kubenswrapper[4784]: I1205 12:27:58.617838 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6d81f1d0-3e85-443a-a738-2e0d9302d327-utilities\") pod \"redhat-marketplace-hkkm9\" (UID: \"6d81f1d0-3e85-443a-a738-2e0d9302d327\") " pod="openshift-marketplace/redhat-marketplace-hkkm9"
Dec 05 12:27:58 crc kubenswrapper[4784]: I1205 12:27:58.617912 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6d81f1d0-3e85-443a-a738-2e0d9302d327-catalog-content\") pod \"redhat-marketplace-hkkm9\" (UID: \"6d81f1d0-3e85-443a-a738-2e0d9302d327\") " pod="openshift-marketplace/redhat-marketplace-hkkm9"
Dec 05 12:27:58 crc kubenswrapper[4784]: I1205 12:27:58.617943 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxshj\" (UniqueName: \"kubernetes.io/projected/6d81f1d0-3e85-443a-a738-2e0d9302d327-kube-api-access-pxshj\") pod \"redhat-marketplace-hkkm9\" (UID: \"6d81f1d0-3e85-443a-a738-2e0d9302d327\") " pod="openshift-marketplace/redhat-marketplace-hkkm9"
Dec 05 12:27:58 crc kubenswrapper[4784]: I1205 12:27:58.619314 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6d81f1d0-3e85-443a-a738-2e0d9302d327-utilities\") pod \"redhat-marketplace-hkkm9\" (UID: \"6d81f1d0-3e85-443a-a738-2e0d9302d327\") " pod="openshift-marketplace/redhat-marketplace-hkkm9"
Dec 05 12:27:58 crc kubenswrapper[4784]: I1205 12:27:58.619545 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6d81f1d0-3e85-443a-a738-2e0d9302d327-catalog-content\") pod \"redhat-marketplace-hkkm9\" (UID: \"6d81f1d0-3e85-443a-a738-2e0d9302d327\") " pod="openshift-marketplace/redhat-marketplace-hkkm9"
Dec 05 12:27:58 crc kubenswrapper[4784]: I1205 12:27:58.639846 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pxshj\" (UniqueName: \"kubernetes.io/projected/6d81f1d0-3e85-443a-a738-2e0d9302d327-kube-api-access-pxshj\") pod \"redhat-marketplace-hkkm9\" (UID: \"6d81f1d0-3e85-443a-a738-2e0d9302d327\") " pod="openshift-marketplace/redhat-marketplace-hkkm9"
Dec 05 12:27:58 crc kubenswrapper[4784]: I1205 12:27:58.704068 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hkkm9"
Dec 05 12:27:58 crc kubenswrapper[4784]: I1205 12:27:58.783016 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-x9c8w"]
Dec 05 12:27:58 crc kubenswrapper[4784]: I1205 12:27:58.801903 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-x9c8w"
Dec 05 12:27:58 crc kubenswrapper[4784]: I1205 12:27:58.809555 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-x9c8w"]
Dec 05 12:27:58 crc kubenswrapper[4784]: I1205 12:27:58.897313 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-7t8s6" event={"ID":"c0a51b7e-45fa-4c8b-9700-0872a5f49527","Type":"ContainerDied","Data":"f9e6172c41b34b6fcf10d814576261c172095c1f8e24d9b1a4a574e40c7027be"}
Dec 05 12:27:58 crc kubenswrapper[4784]: I1205 12:27:58.897364 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f9e6172c41b34b6fcf10d814576261c172095c1f8e24d9b1a4a574e40c7027be"
Dec 05 12:27:58 crc kubenswrapper[4784]: I1205 12:27:58.897452 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415615-7t8s6"
Dec 05 12:27:58 crc kubenswrapper[4784]: I1205 12:27:58.937925 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvbr6\" (UniqueName: \"kubernetes.io/projected/2f4fce3b-5270-43f8-9a95-b9c15beb8bd1-kube-api-access-nvbr6\") pod \"redhat-marketplace-x9c8w\" (UID: \"2f4fce3b-5270-43f8-9a95-b9c15beb8bd1\") " pod="openshift-marketplace/redhat-marketplace-x9c8w"
Dec 05 12:27:58 crc kubenswrapper[4784]: I1205 12:27:58.938018 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f4fce3b-5270-43f8-9a95-b9c15beb8bd1-catalog-content\") pod \"redhat-marketplace-x9c8w\" (UID: \"2f4fce3b-5270-43f8-9a95-b9c15beb8bd1\") " pod="openshift-marketplace/redhat-marketplace-x9c8w"
Dec 05 12:27:58 crc kubenswrapper[4784]: I1205 12:27:58.938224 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f4fce3b-5270-43f8-9a95-b9c15beb8bd1-utilities\") pod \"redhat-marketplace-x9c8w\" (UID: \"2f4fce3b-5270-43f8-9a95-b9c15beb8bd1\") " pod="openshift-marketplace/redhat-marketplace-x9c8w"
Dec 05 12:27:58 crc kubenswrapper[4784]: I1205 12:27:58.946704 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hkkm9"]
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.039219 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvbr6\" (UniqueName: \"kubernetes.io/projected/2f4fce3b-5270-43f8-9a95-b9c15beb8bd1-kube-api-access-nvbr6\") pod \"redhat-marketplace-x9c8w\" (UID: \"2f4fce3b-5270-43f8-9a95-b9c15beb8bd1\") " pod="openshift-marketplace/redhat-marketplace-x9c8w"
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.039313 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f4fce3b-5270-43f8-9a95-b9c15beb8bd1-catalog-content\") pod \"redhat-marketplace-x9c8w\" (UID: \"2f4fce3b-5270-43f8-9a95-b9c15beb8bd1\") " pod="openshift-marketplace/redhat-marketplace-x9c8w"
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.039416 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f4fce3b-5270-43f8-9a95-b9c15beb8bd1-utilities\") pod \"redhat-marketplace-x9c8w\" (UID: \"2f4fce3b-5270-43f8-9a95-b9c15beb8bd1\") " pod="openshift-marketplace/redhat-marketplace-x9c8w"
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.040480 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f4fce3b-5270-43f8-9a95-b9c15beb8bd1-utilities\") pod \"redhat-marketplace-x9c8w\" (UID: \"2f4fce3b-5270-43f8-9a95-b9c15beb8bd1\") " pod="openshift-marketplace/redhat-marketplace-x9c8w"
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.040755 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f4fce3b-5270-43f8-9a95-b9c15beb8bd1-catalog-content\") pod \"redhat-marketplace-x9c8w\" (UID: \"2f4fce3b-5270-43f8-9a95-b9c15beb8bd1\") " pod="openshift-marketplace/redhat-marketplace-x9c8w"
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.057651 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nvbr6\" (UniqueName: \"kubernetes.io/projected/2f4fce3b-5270-43f8-9a95-b9c15beb8bd1-kube-api-access-nvbr6\") pod \"redhat-marketplace-x9c8w\" (UID: \"2f4fce3b-5270-43f8-9a95-b9c15beb8bd1\") " pod="openshift-marketplace/redhat-marketplace-x9c8w"
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.113407 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.128171 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-x9c8w"
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.131359 4784 patch_prober.go:28] interesting pod/router-default-5444994796-wpt8s container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 05 12:27:59 crc kubenswrapper[4784]: [-]has-synced failed: reason withheld
Dec 05 12:27:59 crc kubenswrapper[4784]: [+]process-running ok
Dec 05 12:27:59 crc kubenswrapper[4784]: healthz check failed
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.131409 4784 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wpt8s" podUID="095096dd-0a14-4993-80fb-c332ae212107" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.163587 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Dec 05 12:27:59 crc kubenswrapper[4784]: E1205 12:27:59.164003 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08d4812d-b79e-42fb-9400-5704bbce87c5" containerName="pruner"
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.164023 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="08d4812d-b79e-42fb-9400-5704bbce87c5" containerName="pruner"
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.164174 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="08d4812d-b79e-42fb-9400-5704bbce87c5" containerName="pruner"
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.164723 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.168561 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt"
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.168683 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n"
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.168843 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.241634 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/08d4812d-b79e-42fb-9400-5704bbce87c5-kubelet-dir\") pod \"08d4812d-b79e-42fb-9400-5704bbce87c5\" (UID: \"08d4812d-b79e-42fb-9400-5704bbce87c5\") "
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.241954 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/08d4812d-b79e-42fb-9400-5704bbce87c5-kube-api-access\") pod \"08d4812d-b79e-42fb-9400-5704bbce87c5\" (UID: \"08d4812d-b79e-42fb-9400-5704bbce87c5\") "
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.241736 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/08d4812d-b79e-42fb-9400-5704bbce87c5-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "08d4812d-b79e-42fb-9400-5704bbce87c5" (UID: "08d4812d-b79e-42fb-9400-5704bbce87c5"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.242300 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7ece1677-b499-44c3-aa54-790e0314b6a2-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"7ece1677-b499-44c3-aa54-790e0314b6a2\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.242332 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7ece1677-b499-44c3-aa54-790e0314b6a2-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"7ece1677-b499-44c3-aa54-790e0314b6a2\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.242429 4784 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/08d4812d-b79e-42fb-9400-5704bbce87c5-kubelet-dir\") on node \"crc\" DevicePath \"\""
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.245157 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08d4812d-b79e-42fb-9400-5704bbce87c5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "08d4812d-b79e-42fb-9400-5704bbce87c5" (UID: "08d4812d-b79e-42fb-9400-5704bbce87c5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.343141 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7ece1677-b499-44c3-aa54-790e0314b6a2-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"7ece1677-b499-44c3-aa54-790e0314b6a2\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.343181 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7ece1677-b499-44c3-aa54-790e0314b6a2-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"7ece1677-b499-44c3-aa54-790e0314b6a2\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.343329 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/08d4812d-b79e-42fb-9400-5704bbce87c5-kube-api-access\") on node \"crc\" DevicePath \"\""
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.343383 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7ece1677-b499-44c3-aa54-790e0314b6a2-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"7ece1677-b499-44c3-aa54-790e0314b6a2\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.365711 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7ece1677-b499-44c3-aa54-790e0314b6a2-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"7ece1677-b499-44c3-aa54-790e0314b6a2\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.367326 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-x9c8w"]
Dec 05 12:27:59 crc kubenswrapper[4784]: W1205 12:27:59.392246 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2f4fce3b_5270_43f8_9a95_b9c15beb8bd1.slice/crio-94b81b48ad705d38763709d5a8825039e3df01417a0c8e53a145fbd24eab992c WatchSource:0}: Error finding container 94b81b48ad705d38763709d5a8825039e3df01417a0c8e53a145fbd24eab992c: Status 404 returned error can't find the container with id 94b81b48ad705d38763709d5a8825039e3df01417a0c8e53a145fbd24eab992c
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.482928 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.572708 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.573059 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.577084 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-vlvmb"]
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.578273 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vlvmb"
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.580137 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.593669 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vlvmb"]
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.710404 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.748845 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4xbq9\" (UniqueName: \"kubernetes.io/projected/626019ff-24ba-4b81-b6ad-ba7c7085fa55-kube-api-access-4xbq9\") pod \"redhat-operators-vlvmb\" (UID: \"626019ff-24ba-4b81-b6ad-ba7c7085fa55\") " pod="openshift-marketplace/redhat-operators-vlvmb"
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.748944 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/626019ff-24ba-4b81-b6ad-ba7c7085fa55-utilities\") pod \"redhat-operators-vlvmb\" (UID: \"626019ff-24ba-4b81-b6ad-ba7c7085fa55\") " pod="openshift-marketplace/redhat-operators-vlvmb"
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.748989 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/626019ff-24ba-4b81-b6ad-ba7c7085fa55-catalog-content\") pod \"redhat-operators-vlvmb\" (UID: \"626019ff-24ba-4b81-b6ad-ba7c7085fa55\") " pod="openshift-marketplace/redhat-operators-vlvmb"
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.849968 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4xbq9\" (UniqueName: \"kubernetes.io/projected/626019ff-24ba-4b81-b6ad-ba7c7085fa55-kube-api-access-4xbq9\") pod \"redhat-operators-vlvmb\" (UID: \"626019ff-24ba-4b81-b6ad-ba7c7085fa55\") " pod="openshift-marketplace/redhat-operators-vlvmb"
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.850026 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/626019ff-24ba-4b81-b6ad-ba7c7085fa55-utilities\") pod \"redhat-operators-vlvmb\" (UID: \"626019ff-24ba-4b81-b6ad-ba7c7085fa55\") " pod="openshift-marketplace/redhat-operators-vlvmb"
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.850064 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/626019ff-24ba-4b81-b6ad-ba7c7085fa55-catalog-content\") pod \"redhat-operators-vlvmb\" (UID: \"626019ff-24ba-4b81-b6ad-ba7c7085fa55\") " pod="openshift-marketplace/redhat-operators-vlvmb"
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.850545 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/626019ff-24ba-4b81-b6ad-ba7c7085fa55-utilities\") pod \"redhat-operators-vlvmb\" (UID: \"626019ff-24ba-4b81-b6ad-ba7c7085fa55\") " pod="openshift-marketplace/redhat-operators-vlvmb"
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.850679 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/626019ff-24ba-4b81-b6ad-ba7c7085fa55-catalog-content\") pod \"redhat-operators-vlvmb\" (UID: \"626019ff-24ba-4b81-b6ad-ba7c7085fa55\") " pod="openshift-marketplace/redhat-operators-vlvmb"
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.870164 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4xbq9\" (UniqueName: \"kubernetes.io/projected/626019ff-24ba-4b81-b6ad-ba7c7085fa55-kube-api-access-4xbq9\") pod \"redhat-operators-vlvmb\" (UID: \"626019ff-24ba-4b81-b6ad-ba7c7085fa55\") " pod="openshift-marketplace/redhat-operators-vlvmb"
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.904236 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hkkm9" event={"ID":"6d81f1d0-3e85-443a-a738-2e0d9302d327","Type":"ContainerStarted","Data":"439a9d55b10105aabca347dbf4ebd885e6520d633ba65f17d0c6958c1d4bc84d"}
Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.906829 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.906844 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"08d4812d-b79e-42fb-9400-5704bbce87c5","Type":"ContainerDied","Data":"c03446f367c928a8633d5e543735fa5498c97a9a944e79a5703ae6f708f1416e"} Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.906883 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c03446f367c928a8633d5e543735fa5498c97a9a944e79a5703ae6f708f1416e" Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.909557 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"7ece1677-b499-44c3-aa54-790e0314b6a2","Type":"ContainerStarted","Data":"206a20eef6cf0c969d8389bd061c1495fc330693c458590ed3359a1e3aa3a635"} Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.911318 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x9c8w" event={"ID":"2f4fce3b-5270-43f8-9a95-b9c15beb8bd1","Type":"ContainerStarted","Data":"94b81b48ad705d38763709d5a8825039e3df01417a0c8e53a145fbd24eab992c"} Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.915547 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vlvmb" Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.980997 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-5gkgh"] Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.982857 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5gkgh" Dec 05 12:27:59 crc kubenswrapper[4784]: I1205 12:27:59.997599 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5gkgh"] Dec 05 12:28:00 crc kubenswrapper[4784]: I1205 12:28:00.004005 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wfsr7" Dec 05 12:28:00 crc kubenswrapper[4784]: I1205 12:28:00.053209 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31ce912b-e8c6-4b90-b8ed-d2051dce1232-catalog-content\") pod \"redhat-operators-5gkgh\" (UID: \"31ce912b-e8c6-4b90-b8ed-d2051dce1232\") " pod="openshift-marketplace/redhat-operators-5gkgh" Dec 05 12:28:00 crc kubenswrapper[4784]: I1205 12:28:00.053406 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7562b\" (UniqueName: \"kubernetes.io/projected/31ce912b-e8c6-4b90-b8ed-d2051dce1232-kube-api-access-7562b\") pod \"redhat-operators-5gkgh\" (UID: \"31ce912b-e8c6-4b90-b8ed-d2051dce1232\") " pod="openshift-marketplace/redhat-operators-5gkgh" Dec 05 12:28:00 crc kubenswrapper[4784]: I1205 12:28:00.053439 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31ce912b-e8c6-4b90-b8ed-d2051dce1232-utilities\") pod \"redhat-operators-5gkgh\" (UID: \"31ce912b-e8c6-4b90-b8ed-d2051dce1232\") " pod="openshift-marketplace/redhat-operators-5gkgh" Dec 05 12:28:00 crc kubenswrapper[4784]: I1205 12:28:00.134508 4784 patch_prober.go:28] interesting 
pod/router-default-5444994796-wpt8s container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 12:28:00 crc kubenswrapper[4784]: [-]has-synced failed: reason withheld Dec 05 12:28:00 crc kubenswrapper[4784]: [+]process-running ok Dec 05 12:28:00 crc kubenswrapper[4784]: healthz check failed Dec 05 12:28:00 crc kubenswrapper[4784]: I1205 12:28:00.134568 4784 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wpt8s" podUID="095096dd-0a14-4993-80fb-c332ae212107" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 12:28:00 crc kubenswrapper[4784]: I1205 12:28:00.154377 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7562b\" (UniqueName: \"kubernetes.io/projected/31ce912b-e8c6-4b90-b8ed-d2051dce1232-kube-api-access-7562b\") pod \"redhat-operators-5gkgh\" (UID: \"31ce912b-e8c6-4b90-b8ed-d2051dce1232\") " pod="openshift-marketplace/redhat-operators-5gkgh" Dec 05 12:28:00 crc kubenswrapper[4784]: I1205 12:28:00.154435 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31ce912b-e8c6-4b90-b8ed-d2051dce1232-utilities\") pod \"redhat-operators-5gkgh\" (UID: \"31ce912b-e8c6-4b90-b8ed-d2051dce1232\") " pod="openshift-marketplace/redhat-operators-5gkgh" Dec 05 12:28:00 crc kubenswrapper[4784]: I1205 12:28:00.154497 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31ce912b-e8c6-4b90-b8ed-d2051dce1232-catalog-content\") pod \"redhat-operators-5gkgh\" (UID: \"31ce912b-e8c6-4b90-b8ed-d2051dce1232\") " pod="openshift-marketplace/redhat-operators-5gkgh" Dec 05 12:28:00 crc kubenswrapper[4784]: I1205 12:28:00.155903 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31ce912b-e8c6-4b90-b8ed-d2051dce1232-catalog-content\") pod \"redhat-operators-5gkgh\" (UID: \"31ce912b-e8c6-4b90-b8ed-d2051dce1232\") " pod="openshift-marketplace/redhat-operators-5gkgh" Dec 05 12:28:00 crc kubenswrapper[4784]: I1205 12:28:00.155943 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31ce912b-e8c6-4b90-b8ed-d2051dce1232-utilities\") pod \"redhat-operators-5gkgh\" (UID: \"31ce912b-e8c6-4b90-b8ed-d2051dce1232\") " pod="openshift-marketplace/redhat-operators-5gkgh" Dec 05 12:28:00 crc kubenswrapper[4784]: I1205 12:28:00.176239 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vlvmb"] Dec 05 12:28:00 crc kubenswrapper[4784]: I1205 12:28:00.183238 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7562b\" (UniqueName: \"kubernetes.io/projected/31ce912b-e8c6-4b90-b8ed-d2051dce1232-kube-api-access-7562b\") pod \"redhat-operators-5gkgh\" (UID: \"31ce912b-e8c6-4b90-b8ed-d2051dce1232\") " pod="openshift-marketplace/redhat-operators-5gkgh" Dec 05 12:28:00 crc kubenswrapper[4784]: I1205 12:28:00.305157 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5gkgh" Dec 05 12:28:00 crc kubenswrapper[4784]: I1205 12:28:00.489465 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5gkgh"] Dec 05 12:28:00 crc kubenswrapper[4784]: W1205 12:28:00.494021 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod31ce912b_e8c6_4b90_b8ed_d2051dce1232.slice/crio-25c60e0ea5715d50fcdc1914d1aecf9d90c3b620537330e4b2cd935d614468b4 WatchSource:0}: Error finding container 25c60e0ea5715d50fcdc1914d1aecf9d90c3b620537330e4b2cd935d614468b4: Status 404 returned error can't find the container with id 25c60e0ea5715d50fcdc1914d1aecf9d90c3b620537330e4b2cd935d614468b4 Dec 05 12:28:00 crc kubenswrapper[4784]: I1205 12:28:00.917153 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5gkgh" event={"ID":"31ce912b-e8c6-4b90-b8ed-d2051dce1232","Type":"ContainerStarted","Data":"25c60e0ea5715d50fcdc1914d1aecf9d90c3b620537330e4b2cd935d614468b4"} Dec 05 12:28:00 crc kubenswrapper[4784]: I1205 12:28:00.918259 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vlvmb" event={"ID":"626019ff-24ba-4b81-b6ad-ba7c7085fa55","Type":"ContainerStarted","Data":"9c8fbadb000f04d87d83c5c109b197f6cfe898ed22830c7225cff0880da829a6"} Dec 05 12:28:00 crc kubenswrapper[4784]: I1205 12:28:00.919654 4784 generic.go:334] "Generic (PLEG): container finished" podID="6d81f1d0-3e85-443a-a738-2e0d9302d327" containerID="9d98bde74b060f758faa4205ce89b973e81327287496df2e2b8bc42eb4036271" exitCode=0 Dec 05 12:28:00 crc kubenswrapper[4784]: I1205 12:28:00.919695 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hkkm9" event={"ID":"6d81f1d0-3e85-443a-a738-2e0d9302d327","Type":"ContainerDied","Data":"9d98bde74b060f758faa4205ce89b973e81327287496df2e2b8bc42eb4036271"} Dec 05 12:28:01 crc kubenswrapper[4784]: I1205 12:28:01.132426 4784 patch_prober.go:28] interesting pod/router-default-5444994796-wpt8s container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 12:28:01 crc kubenswrapper[4784]: [-]has-synced failed: reason withheld Dec 05 12:28:01 crc kubenswrapper[4784]: [+]process-running ok Dec 05 12:28:01 crc kubenswrapper[4784]: healthz check failed Dec 05 12:28:01 crc kubenswrapper[4784]: I1205 12:28:01.132797 4784 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wpt8s" podUID="095096dd-0a14-4993-80fb-c332ae212107" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 12:28:01 crc kubenswrapper[4784]: I1205 12:28:01.925658 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x9c8w" event={"ID":"2f4fce3b-5270-43f8-9a95-b9c15beb8bd1","Type":"ContainerStarted","Data":"f8056b1959f5eefb30415033ed3d8ace5dcc7209d7e3d1344e0b2cba921830ee"} Dec 05 12:28:02 crc kubenswrapper[4784]: I1205 12:28:02.130562 4784 patch_prober.go:28] interesting pod/router-default-5444994796-wpt8s container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 12:28:02 crc kubenswrapper[4784]: [-]has-synced failed: reason withheld Dec 05 
12:28:02 crc kubenswrapper[4784]: [+]process-running ok Dec 05 12:28:02 crc kubenswrapper[4784]: healthz check failed Dec 05 12:28:02 crc kubenswrapper[4784]: I1205 12:28:02.130684 4784 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wpt8s" podUID="095096dd-0a14-4993-80fb-c332ae212107" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 12:28:02 crc kubenswrapper[4784]: I1205 12:28:02.877852 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-l8jkp" Dec 05 12:28:02 crc kubenswrapper[4784]: I1205 12:28:02.933832 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"7ece1677-b499-44c3-aa54-790e0314b6a2","Type":"ContainerStarted","Data":"ff92c3b1ff20d00a9bd5e104de35d00311e97f3e268503c6380d8c641d736cd9"} Dec 05 12:28:02 crc kubenswrapper[4784]: I1205 12:28:02.935734 4784 generic.go:334] "Generic (PLEG): container finished" podID="2f4fce3b-5270-43f8-9a95-b9c15beb8bd1" containerID="f8056b1959f5eefb30415033ed3d8ace5dcc7209d7e3d1344e0b2cba921830ee" exitCode=0 Dec 05 12:28:02 crc kubenswrapper[4784]: I1205 12:28:02.935783 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x9c8w" event={"ID":"2f4fce3b-5270-43f8-9a95-b9c15beb8bd1","Type":"ContainerDied","Data":"f8056b1959f5eefb30415033ed3d8ace5dcc7209d7e3d1344e0b2cba921830ee"} Dec 05 12:28:02 crc kubenswrapper[4784]: I1205 12:28:02.943915 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5gkgh" event={"ID":"31ce912b-e8c6-4b90-b8ed-d2051dce1232","Type":"ContainerStarted","Data":"47d14e12ef59712ee4a3b75660d020bfa1dc6660fcbc3a528ed44086e5549508"} Dec 05 12:28:02 crc kubenswrapper[4784]: I1205 12:28:02.946081 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vlvmb" event={"ID":"626019ff-24ba-4b81-b6ad-ba7c7085fa55","Type":"ContainerStarted","Data":"580130158a4a16e8670ad4ee16d3ee4a1b48d98dc77d03681caea62f6f797751"} Dec 05 12:28:03 crc kubenswrapper[4784]: I1205 12:28:03.130984 4784 patch_prober.go:28] interesting pod/router-default-5444994796-wpt8s container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 12:28:03 crc kubenswrapper[4784]: [-]has-synced failed: reason withheld Dec 05 12:28:03 crc kubenswrapper[4784]: [+]process-running ok Dec 05 12:28:03 crc kubenswrapper[4784]: healthz check failed Dec 05 12:28:03 crc kubenswrapper[4784]: I1205 12:28:03.131054 4784 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wpt8s" podUID="095096dd-0a14-4993-80fb-c332ae212107" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 12:28:03 crc kubenswrapper[4784]: I1205 12:28:03.962810 4784 generic.go:334] "Generic (PLEG): container finished" podID="7ece1677-b499-44c3-aa54-790e0314b6a2" containerID="ff92c3b1ff20d00a9bd5e104de35d00311e97f3e268503c6380d8c641d736cd9" exitCode=0 Dec 05 12:28:03 crc kubenswrapper[4784]: I1205 12:28:03.962859 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"7ece1677-b499-44c3-aa54-790e0314b6a2","Type":"ContainerDied","Data":"ff92c3b1ff20d00a9bd5e104de35d00311e97f3e268503c6380d8c641d736cd9"} 
Dec 05 12:28:04 crc kubenswrapper[4784]: I1205 12:28:04.002783 4784 generic.go:334] "Generic (PLEG): container finished" podID="31ce912b-e8c6-4b90-b8ed-d2051dce1232" containerID="47d14e12ef59712ee4a3b75660d020bfa1dc6660fcbc3a528ed44086e5549508" exitCode=0 Dec 05 12:28:04 crc kubenswrapper[4784]: I1205 12:28:04.002908 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5gkgh" event={"ID":"31ce912b-e8c6-4b90-b8ed-d2051dce1232","Type":"ContainerDied","Data":"47d14e12ef59712ee4a3b75660d020bfa1dc6660fcbc3a528ed44086e5549508"} Dec 05 12:28:04 crc kubenswrapper[4784]: I1205 12:28:04.009992 4784 generic.go:334] "Generic (PLEG): container finished" podID="626019ff-24ba-4b81-b6ad-ba7c7085fa55" containerID="580130158a4a16e8670ad4ee16d3ee4a1b48d98dc77d03681caea62f6f797751" exitCode=0 Dec 05 12:28:04 crc kubenswrapper[4784]: I1205 12:28:04.010025 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vlvmb" event={"ID":"626019ff-24ba-4b81-b6ad-ba7c7085fa55","Type":"ContainerDied","Data":"580130158a4a16e8670ad4ee16d3ee4a1b48d98dc77d03681caea62f6f797751"} Dec 05 12:28:04 crc kubenswrapper[4784]: I1205 12:28:04.136442 4784 patch_prober.go:28] interesting pod/router-default-5444994796-wpt8s container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 12:28:04 crc kubenswrapper[4784]: [-]has-synced failed: reason withheld Dec 05 12:28:04 crc kubenswrapper[4784]: [+]process-running ok Dec 05 12:28:04 crc kubenswrapper[4784]: healthz check failed Dec 05 12:28:04 crc kubenswrapper[4784]: I1205 12:28:04.136500 4784 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wpt8s" podUID="095096dd-0a14-4993-80fb-c332ae212107" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 12:28:05 crc kubenswrapper[4784]: I1205 12:28:05.133629 4784 patch_prober.go:28] interesting pod/router-default-5444994796-wpt8s container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 12:28:05 crc kubenswrapper[4784]: [-]has-synced failed: reason withheld Dec 05 12:28:05 crc kubenswrapper[4784]: [+]process-running ok Dec 05 12:28:05 crc kubenswrapper[4784]: healthz check failed Dec 05 12:28:05 crc kubenswrapper[4784]: I1205 12:28:05.133943 4784 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wpt8s" podUID="095096dd-0a14-4993-80fb-c332ae212107" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 12:28:05 crc kubenswrapper[4784]: I1205 12:28:05.421609 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 12:28:05 crc kubenswrapper[4784]: I1205 12:28:05.544336 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7ece1677-b499-44c3-aa54-790e0314b6a2-kube-api-access\") pod \"7ece1677-b499-44c3-aa54-790e0314b6a2\" (UID: \"7ece1677-b499-44c3-aa54-790e0314b6a2\") " Dec 05 12:28:05 crc kubenswrapper[4784]: I1205 12:28:05.544487 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7ece1677-b499-44c3-aa54-790e0314b6a2-kubelet-dir\") pod \"7ece1677-b499-44c3-aa54-790e0314b6a2\" (UID: \"7ece1677-b499-44c3-aa54-790e0314b6a2\") " Dec 05 12:28:05 crc kubenswrapper[4784]: I1205 12:28:05.544751 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7ece1677-b499-44c3-aa54-790e0314b6a2-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "7ece1677-b499-44c3-aa54-790e0314b6a2" (UID: "7ece1677-b499-44c3-aa54-790e0314b6a2"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:28:05 crc kubenswrapper[4784]: I1205 12:28:05.569314 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ece1677-b499-44c3-aa54-790e0314b6a2-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "7ece1677-b499-44c3-aa54-790e0314b6a2" (UID: "7ece1677-b499-44c3-aa54-790e0314b6a2"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:28:05 crc kubenswrapper[4784]: I1205 12:28:05.645490 4784 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7ece1677-b499-44c3-aa54-790e0314b6a2-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:05 crc kubenswrapper[4784]: I1205 12:28:05.645526 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7ece1677-b499-44c3-aa54-790e0314b6a2-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:06 crc kubenswrapper[4784]: I1205 12:28:06.038381 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"7ece1677-b499-44c3-aa54-790e0314b6a2","Type":"ContainerDied","Data":"206a20eef6cf0c969d8389bd061c1495fc330693c458590ed3359a1e3aa3a635"} Dec 05 12:28:06 crc kubenswrapper[4784]: I1205 12:28:06.038734 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="206a20eef6cf0c969d8389bd061c1495fc330693c458590ed3359a1e3aa3a635" Dec 05 12:28:06 crc kubenswrapper[4784]: I1205 12:28:06.038426 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 12:28:06 crc kubenswrapper[4784]: I1205 12:28:06.131627 4784 patch_prober.go:28] interesting pod/router-default-5444994796-wpt8s container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 12:28:06 crc kubenswrapper[4784]: [-]has-synced failed: reason withheld Dec 05 12:28:06 crc kubenswrapper[4784]: [+]process-running ok Dec 05 12:28:06 crc kubenswrapper[4784]: healthz check failed Dec 05 12:28:06 crc kubenswrapper[4784]: I1205 12:28:06.131689 4784 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wpt8s" podUID="095096dd-0a14-4993-80fb-c332ae212107" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 12:28:06 crc kubenswrapper[4784]: I1205 12:28:06.657483 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/531c2cfd-8b93-4ec4-88ab-fb4e40de2543-metrics-certs\") pod \"network-metrics-daemon-ln9ct\" (UID: \"531c2cfd-8b93-4ec4-88ab-fb4e40de2543\") " pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:28:06 crc kubenswrapper[4784]: I1205 12:28:06.662038 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/531c2cfd-8b93-4ec4-88ab-fb4e40de2543-metrics-certs\") pod \"network-metrics-daemon-ln9ct\" (UID: \"531c2cfd-8b93-4ec4-88ab-fb4e40de2543\") " pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:28:06 crc kubenswrapper[4784]: I1205 12:28:06.714781 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-ln9ct" Dec 05 12:28:07 crc kubenswrapper[4784]: I1205 12:28:07.066681 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-ln9ct"] Dec 05 12:28:07 crc kubenswrapper[4784]: W1205 12:28:07.096398 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod531c2cfd_8b93_4ec4_88ab_fb4e40de2543.slice/crio-67531215f45458a963bd1953d594ae8bb6f37024efd227556dd73a5ab323f5c1 WatchSource:0}: Error finding container 67531215f45458a963bd1953d594ae8bb6f37024efd227556dd73a5ab323f5c1: Status 404 returned error can't find the container with id 67531215f45458a963bd1953d594ae8bb6f37024efd227556dd73a5ab323f5c1 Dec 05 12:28:07 crc kubenswrapper[4784]: I1205 12:28:07.141155 4784 patch_prober.go:28] interesting pod/router-default-5444994796-wpt8s container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 12:28:07 crc kubenswrapper[4784]: [-]has-synced failed: reason withheld Dec 05 12:28:07 crc kubenswrapper[4784]: [+]process-running ok Dec 05 12:28:07 crc kubenswrapper[4784]: healthz check failed Dec 05 12:28:07 crc kubenswrapper[4784]: I1205 12:28:07.141229 4784 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wpt8s" podUID="095096dd-0a14-4993-80fb-c332ae212107" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 12:28:07 crc kubenswrapper[4784]: I1205 12:28:07.146573 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-k7w2s" Dec 05 12:28:07 crc kubenswrapper[4784]: I1205 12:28:07.307548 4784 patch_prober.go:28] interesting pod/console-f9d7485db-m7m44 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.12:8443/health\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body= Dec 05 12:28:07 crc kubenswrapper[4784]: I1205 12:28:07.307625 4784 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-m7m44" podUID="663c4c9a-9738-4c49-9199-d2a18cd6d4be" containerName="console" probeResult="failure" output="Get \"https://10.217.0.12:8443/health\": dial tcp 10.217.0.12:8443: connect: connection refused" Dec 05 12:28:08 crc kubenswrapper[4784]: I1205 12:28:08.060051 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-ln9ct" event={"ID":"531c2cfd-8b93-4ec4-88ab-fb4e40de2543","Type":"ContainerStarted","Data":"c7d35f5cb2dab0be4a7744c8e9028935c83765f86e11350cf4f0f02d5de6e5b3"} Dec 05 12:28:08 crc kubenswrapper[4784]: I1205 12:28:08.060453 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-ln9ct" event={"ID":"531c2cfd-8b93-4ec4-88ab-fb4e40de2543","Type":"ContainerStarted","Data":"67531215f45458a963bd1953d594ae8bb6f37024efd227556dd73a5ab323f5c1"} Dec 05 12:28:08 crc kubenswrapper[4784]: I1205 12:28:08.131364 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-wpt8s" Dec 05 12:28:08 crc kubenswrapper[4784]: I1205 12:28:08.138052 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-wpt8s" Dec 05 12:28:15 crc 
kubenswrapper[4784]: I1205 12:28:15.738961 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:28:17 crc kubenswrapper[4784]: I1205 12:28:17.111226 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-ln9ct" event={"ID":"531c2cfd-8b93-4ec4-88ab-fb4e40de2543","Type":"ContainerStarted","Data":"104990d28e100cdc7fd2461a68fdeee4f37b3d6f7061c218dca1020c15e66fe2"} Dec 05 12:28:17 crc kubenswrapper[4784]: I1205 12:28:17.134933 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-ln9ct" podStartSLOduration=154.134892115 podStartE2EDuration="2m34.134892115s" podCreationTimestamp="2025-12-05 12:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:28:17.131533458 +0000 UTC m=+176.551600283" watchObservedRunningTime="2025-12-05 12:28:17.134892115 +0000 UTC m=+176.554958930" Dec 05 12:28:17 crc kubenswrapper[4784]: I1205 12:28:17.311898 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-m7m44" Dec 05 12:28:17 crc kubenswrapper[4784]: I1205 12:28:17.317265 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-m7m44" Dec 05 12:28:27 crc kubenswrapper[4784]: I1205 12:28:27.770527 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qbc5m" Dec 05 12:28:27 crc kubenswrapper[4784]: I1205 12:28:27.904402 4784 patch_prober.go:28] interesting pod/authentication-operator-69f744f599-tcnkb container/authentication-operator namespace/openshift-authentication-operator: Liveness probe status=failure output="Get \"https://10.217.0.9:8443/healthz\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 05 12:28:27 crc kubenswrapper[4784]: I1205 12:28:27.904483 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-authentication-operator/authentication-operator-69f744f599-tcnkb" podUID="260e147b-2517-481c-93f4-3335794f5a1e" containerName="authentication-operator" probeResult="failure" output="Get \"https://10.217.0.9:8443/healthz\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 12:28:28 crc kubenswrapper[4784]: I1205 12:28:28.873720 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 12:28:29 crc kubenswrapper[4784]: I1205 12:28:29.580162 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:28:29 crc kubenswrapper[4784]: I1205 12:28:29.580233 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:28:32 crc kubenswrapper[4784]: E1205 12:28:32.499915 4784 log.go:32] "PullImage from image 
service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 05 12:28:32 crc kubenswrapper[4784]: E1205 12:28:32.500115 4784 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tj8js,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-859cx_openshift-marketplace(1ef4cc8a-191e-4e86-9c66-77bdaeca8015): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 12:28:32 crc kubenswrapper[4784]: E1205 12:28:32.501259 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-859cx" podUID="1ef4cc8a-191e-4e86-9c66-77bdaeca8015" Dec 05 12:28:33 crc kubenswrapper[4784]: I1205 12:28:33.169517 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 05 12:28:33 crc kubenswrapper[4784]: E1205 12:28:33.169789 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ece1677-b499-44c3-aa54-790e0314b6a2" containerName="pruner" Dec 05 12:28:33 crc kubenswrapper[4784]: I1205 12:28:33.169806 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ece1677-b499-44c3-aa54-790e0314b6a2" containerName="pruner" Dec 05 12:28:33 crc kubenswrapper[4784]: I1205 12:28:33.170278 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ece1677-b499-44c3-aa54-790e0314b6a2" containerName="pruner" Dec 05 12:28:33 crc kubenswrapper[4784]: I1205 12:28:33.170762 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 12:28:33 crc kubenswrapper[4784]: I1205 12:28:33.175295 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 05 12:28:33 crc kubenswrapper[4784]: I1205 12:28:33.175523 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 05 12:28:33 crc kubenswrapper[4784]: I1205 12:28:33.182035 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 05 12:28:33 crc kubenswrapper[4784]: I1205 12:28:33.235583 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/87ee6b9e-80b7-41da-9f5c-980c8a27bea5-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"87ee6b9e-80b7-41da-9f5c-980c8a27bea5\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 12:28:33 crc kubenswrapper[4784]: I1205 12:28:33.235684 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/87ee6b9e-80b7-41da-9f5c-980c8a27bea5-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"87ee6b9e-80b7-41da-9f5c-980c8a27bea5\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 12:28:33 crc kubenswrapper[4784]: I1205 12:28:33.337038 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/87ee6b9e-80b7-41da-9f5c-980c8a27bea5-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"87ee6b9e-80b7-41da-9f5c-980c8a27bea5\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 12:28:33 crc kubenswrapper[4784]: I1205 12:28:33.337127 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/87ee6b9e-80b7-41da-9f5c-980c8a27bea5-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"87ee6b9e-80b7-41da-9f5c-980c8a27bea5\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 12:28:33 crc kubenswrapper[4784]: I1205 12:28:33.337215 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/87ee6b9e-80b7-41da-9f5c-980c8a27bea5-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"87ee6b9e-80b7-41da-9f5c-980c8a27bea5\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 12:28:33 crc kubenswrapper[4784]: I1205 12:28:33.353971 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/87ee6b9e-80b7-41da-9f5c-980c8a27bea5-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"87ee6b9e-80b7-41da-9f5c-980c8a27bea5\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 12:28:33 crc kubenswrapper[4784]: I1205 12:28:33.510372 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 12:28:34 crc kubenswrapper[4784]: E1205 12:28:34.685175 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-859cx" podUID="1ef4cc8a-191e-4e86-9c66-77bdaeca8015" Dec 05 12:28:34 crc kubenswrapper[4784]: E1205 12:28:34.763696 4784 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 05 12:28:34 crc kubenswrapper[4784]: E1205 12:28:34.764055 4784 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7f4gv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-pd864_openshift-marketplace(f6a64333-7a87-462a-996f-b3ce85e43c8f): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 12:28:34 crc kubenswrapper[4784]: E1205 12:28:34.765568 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-pd864" podUID="f6a64333-7a87-462a-996f-b3ce85e43c8f" Dec 05 12:28:36 crc kubenswrapper[4784]: E1205 12:28:36.231455 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-pd864" podUID="f6a64333-7a87-462a-996f-b3ce85e43c8f" Dec 05 12:28:36 crc kubenswrapper[4784]: E1205 12:28:36.243368 
4784 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 05 12:28:36 crc kubenswrapper[4784]: E1205 12:28:36.243514 4784 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nvbr6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-x9c8w_openshift-marketplace(2f4fce3b-5270-43f8-9a95-b9c15beb8bd1): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 12:28:36 crc kubenswrapper[4784]: E1205 12:28:36.244684 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-x9c8w" podUID="2f4fce3b-5270-43f8-9a95-b9c15beb8bd1" Dec 05 12:28:36 crc kubenswrapper[4784]: E1205 12:28:36.355725 4784 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 05 12:28:36 crc kubenswrapper[4784]: E1205 12:28:36.355891 4784 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pxshj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-hkkm9_openshift-marketplace(6d81f1d0-3e85-443a-a738-2e0d9302d327): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 12:28:36 crc kubenswrapper[4784]: E1205 12:28:36.357088 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-hkkm9" podUID="6d81f1d0-3e85-443a-a738-2e0d9302d327" Dec 05 12:28:36 crc kubenswrapper[4784]: E1205 12:28:36.809526 4784 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 05 12:28:36 crc kubenswrapper[4784]: E1205 12:28:36.809720 4784 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ml2jk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-787vm_openshift-marketplace(e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 12:28:36 crc kubenswrapper[4784]: E1205 12:28:36.810890 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-787vm" podUID="e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8" Dec 05 12:28:38 crc kubenswrapper[4784]: I1205 12:28:38.958092 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 05 12:28:38 crc kubenswrapper[4784]: I1205 12:28:38.959996 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 05 12:28:38 crc kubenswrapper[4784]: I1205 12:28:38.971654 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 05 12:28:39 crc kubenswrapper[4784]: I1205 12:28:39.033437 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/390fc8b7-86b2-4ecc-a41f-1fffd11b1a22-var-lock\") pod \"installer-9-crc\" (UID: \"390fc8b7-86b2-4ecc-a41f-1fffd11b1a22\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 12:28:39 crc kubenswrapper[4784]: I1205 12:28:39.033502 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/390fc8b7-86b2-4ecc-a41f-1fffd11b1a22-kubelet-dir\") pod \"installer-9-crc\" (UID: \"390fc8b7-86b2-4ecc-a41f-1fffd11b1a22\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 12:28:39 crc kubenswrapper[4784]: I1205 12:28:39.033613 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/390fc8b7-86b2-4ecc-a41f-1fffd11b1a22-kube-api-access\") pod \"installer-9-crc\" (UID: \"390fc8b7-86b2-4ecc-a41f-1fffd11b1a22\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 12:28:39 crc kubenswrapper[4784]: I1205 12:28:39.135443 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/390fc8b7-86b2-4ecc-a41f-1fffd11b1a22-kube-api-access\") pod \"installer-9-crc\" (UID: \"390fc8b7-86b2-4ecc-a41f-1fffd11b1a22\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 12:28:39 crc kubenswrapper[4784]: I1205 12:28:39.135521 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/390fc8b7-86b2-4ecc-a41f-1fffd11b1a22-var-lock\") pod \"installer-9-crc\" (UID: \"390fc8b7-86b2-4ecc-a41f-1fffd11b1a22\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 12:28:39 crc kubenswrapper[4784]: I1205 12:28:39.135552 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/390fc8b7-86b2-4ecc-a41f-1fffd11b1a22-kubelet-dir\") pod \"installer-9-crc\" (UID: \"390fc8b7-86b2-4ecc-a41f-1fffd11b1a22\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 12:28:39 crc kubenswrapper[4784]: I1205 12:28:39.135636 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/390fc8b7-86b2-4ecc-a41f-1fffd11b1a22-kubelet-dir\") pod \"installer-9-crc\" (UID: \"390fc8b7-86b2-4ecc-a41f-1fffd11b1a22\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 12:28:39 crc kubenswrapper[4784]: I1205 12:28:39.135953 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/390fc8b7-86b2-4ecc-a41f-1fffd11b1a22-var-lock\") pod \"installer-9-crc\" (UID: \"390fc8b7-86b2-4ecc-a41f-1fffd11b1a22\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 12:28:39 crc kubenswrapper[4784]: I1205 12:28:39.169375 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/390fc8b7-86b2-4ecc-a41f-1fffd11b1a22-kube-api-access\") pod \"installer-9-crc\" (UID: 
\"390fc8b7-86b2-4ecc-a41f-1fffd11b1a22\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 12:28:39 crc kubenswrapper[4784]: I1205 12:28:39.299615 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 05 12:28:42 crc kubenswrapper[4784]: E1205 12:28:42.808944 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-787vm" podUID="e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8" Dec 05 12:28:42 crc kubenswrapper[4784]: E1205 12:28:42.809322 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-x9c8w" podUID="2f4fce3b-5270-43f8-9a95-b9c15beb8bd1" Dec 05 12:28:42 crc kubenswrapper[4784]: E1205 12:28:42.944940 4784 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 05 12:28:42 crc kubenswrapper[4784]: E1205 12:28:42.945432 4784 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7562b,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-5gkgh_openshift-marketplace(31ce912b-e8c6-4b90-b8ed-d2051dce1232): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 12:28:42 crc kubenswrapper[4784]: E1205 12:28:42.947210 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying 
config: context canceled\"" pod="openshift-marketplace/redhat-operators-5gkgh" podUID="31ce912b-e8c6-4b90-b8ed-d2051dce1232" Dec 05 12:28:43 crc kubenswrapper[4784]: E1205 12:28:43.078344 4784 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 05 12:28:43 crc kubenswrapper[4784]: E1205 12:28:43.078497 4784 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2lxnn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-sf2pd_openshift-marketplace(be2166c8-c56d-46bd-ac93-e6eeb11ecba6): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 12:28:43 crc kubenswrapper[4784]: E1205 12:28:43.079716 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-sf2pd" podUID="be2166c8-c56d-46bd-ac93-e6eeb11ecba6" Dec 05 12:28:43 crc kubenswrapper[4784]: I1205 12:28:43.210650 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 05 12:28:43 crc kubenswrapper[4784]: I1205 12:28:43.265354 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"87ee6b9e-80b7-41da-9f5c-980c8a27bea5","Type":"ContainerStarted","Data":"7a68d6eb93abdac4638f13d4952523c9336e8486a44c2229f96e78c7f5d57aee"} Dec 05 12:28:43 crc kubenswrapper[4784]: E1205 12:28:43.268234 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" 
pod="openshift-marketplace/redhat-operators-5gkgh" podUID="31ce912b-e8c6-4b90-b8ed-d2051dce1232" Dec 05 12:28:43 crc kubenswrapper[4784]: E1205 12:28:43.269236 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-sf2pd" podUID="be2166c8-c56d-46bd-ac93-e6eeb11ecba6" Dec 05 12:28:43 crc kubenswrapper[4784]: I1205 12:28:43.279497 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 05 12:28:43 crc kubenswrapper[4784]: W1205 12:28:43.295010 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod390fc8b7_86b2_4ecc_a41f_1fffd11b1a22.slice/crio-6985e4d79c09675da823ff1033185655fb81093533f38b062707729841bd3ece WatchSource:0}: Error finding container 6985e4d79c09675da823ff1033185655fb81093533f38b062707729841bd3ece: Status 404 returned error can't find the container with id 6985e4d79c09675da823ff1033185655fb81093533f38b062707729841bd3ece Dec 05 12:28:43 crc kubenswrapper[4784]: E1205 12:28:43.351244 4784 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 05 12:28:43 crc kubenswrapper[4784]: E1205 12:28:43.351583 4784 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4xbq9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-vlvmb_openshift-marketplace(626019ff-24ba-4b81-b6ad-ba7c7085fa55): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 12:28:43 crc kubenswrapper[4784]: E1205 12:28:43.353284 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with 
ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-vlvmb" podUID="626019ff-24ba-4b81-b6ad-ba7c7085fa55" Dec 05 12:28:44 crc kubenswrapper[4784]: I1205 12:28:44.271411 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"390fc8b7-86b2-4ecc-a41f-1fffd11b1a22","Type":"ContainerStarted","Data":"6985e4d79c09675da823ff1033185655fb81093533f38b062707729841bd3ece"} Dec 05 12:28:44 crc kubenswrapper[4784]: E1205 12:28:44.274907 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-vlvmb" podUID="626019ff-24ba-4b81-b6ad-ba7c7085fa55" Dec 05 12:28:45 crc kubenswrapper[4784]: I1205 12:28:45.279989 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"87ee6b9e-80b7-41da-9f5c-980c8a27bea5","Type":"ContainerStarted","Data":"4b375f445b420437e4dbe58b553e1381ea779e0dbd5130cd26ace553fa558b95"} Dec 05 12:28:45 crc kubenswrapper[4784]: I1205 12:28:45.282050 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"390fc8b7-86b2-4ecc-a41f-1fffd11b1a22","Type":"ContainerStarted","Data":"ecbf27aef13c40bf29d3a984c126a1ebb575be7acd546abc0ea4a42e55a21d9b"} Dec 05 12:28:45 crc kubenswrapper[4784]: I1205 12:28:45.297945 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=12.297912892 podStartE2EDuration="12.297912892s" podCreationTimestamp="2025-12-05 12:28:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:28:45.29549386 +0000 UTC m=+204.715560675" watchObservedRunningTime="2025-12-05 12:28:45.297912892 +0000 UTC m=+204.717979707" Dec 05 12:28:45 crc kubenswrapper[4784]: I1205 12:28:45.314579 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=7.314541349 podStartE2EDuration="7.314541349s" podCreationTimestamp="2025-12-05 12:28:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:28:45.309838619 +0000 UTC m=+204.729905434" watchObservedRunningTime="2025-12-05 12:28:45.314541349 +0000 UTC m=+204.734608164" Dec 05 12:28:46 crc kubenswrapper[4784]: I1205 12:28:46.291708 4784 generic.go:334] "Generic (PLEG): container finished" podID="87ee6b9e-80b7-41da-9f5c-980c8a27bea5" containerID="4b375f445b420437e4dbe58b553e1381ea779e0dbd5130cd26ace553fa558b95" exitCode=0 Dec 05 12:28:46 crc kubenswrapper[4784]: I1205 12:28:46.291839 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"87ee6b9e-80b7-41da-9f5c-980c8a27bea5","Type":"ContainerDied","Data":"4b375f445b420437e4dbe58b553e1381ea779e0dbd5130cd26ace553fa558b95"} Dec 05 12:28:47 crc kubenswrapper[4784]: I1205 12:28:47.544116 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 12:28:47 crc kubenswrapper[4784]: I1205 12:28:47.661474 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/87ee6b9e-80b7-41da-9f5c-980c8a27bea5-kubelet-dir\") pod \"87ee6b9e-80b7-41da-9f5c-980c8a27bea5\" (UID: \"87ee6b9e-80b7-41da-9f5c-980c8a27bea5\") " Dec 05 12:28:47 crc kubenswrapper[4784]: I1205 12:28:47.661635 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/87ee6b9e-80b7-41da-9f5c-980c8a27bea5-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "87ee6b9e-80b7-41da-9f5c-980c8a27bea5" (UID: "87ee6b9e-80b7-41da-9f5c-980c8a27bea5"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:28:47 crc kubenswrapper[4784]: I1205 12:28:47.661768 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/87ee6b9e-80b7-41da-9f5c-980c8a27bea5-kube-api-access\") pod \"87ee6b9e-80b7-41da-9f5c-980c8a27bea5\" (UID: \"87ee6b9e-80b7-41da-9f5c-980c8a27bea5\") " Dec 05 12:28:47 crc kubenswrapper[4784]: I1205 12:28:47.662077 4784 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/87ee6b9e-80b7-41da-9f5c-980c8a27bea5-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:47 crc kubenswrapper[4784]: I1205 12:28:47.667753 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87ee6b9e-80b7-41da-9f5c-980c8a27bea5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "87ee6b9e-80b7-41da-9f5c-980c8a27bea5" (UID: "87ee6b9e-80b7-41da-9f5c-980c8a27bea5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:28:47 crc kubenswrapper[4784]: I1205 12:28:47.763287 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/87ee6b9e-80b7-41da-9f5c-980c8a27bea5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 12:28:48 crc kubenswrapper[4784]: I1205 12:28:48.303400 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"87ee6b9e-80b7-41da-9f5c-980c8a27bea5","Type":"ContainerDied","Data":"7a68d6eb93abdac4638f13d4952523c9336e8486a44c2229f96e78c7f5d57aee"} Dec 05 12:28:48 crc kubenswrapper[4784]: I1205 12:28:48.303445 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7a68d6eb93abdac4638f13d4952523c9336e8486a44c2229f96e78c7f5d57aee" Dec 05 12:28:48 crc kubenswrapper[4784]: I1205 12:28:48.303477 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 12:28:50 crc kubenswrapper[4784]: I1205 12:28:50.317605 4784 generic.go:334] "Generic (PLEG): container finished" podID="f6a64333-7a87-462a-996f-b3ce85e43c8f" containerID="e8ef2134729240e22227a1a94d0a73f7766fe12caeb9ef2e68b414b56413799e" exitCode=0 Dec 05 12:28:50 crc kubenswrapper[4784]: I1205 12:28:50.317651 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pd864" event={"ID":"f6a64333-7a87-462a-996f-b3ce85e43c8f","Type":"ContainerDied","Data":"e8ef2134729240e22227a1a94d0a73f7766fe12caeb9ef2e68b414b56413799e"} Dec 05 12:28:51 crc kubenswrapper[4784]: I1205 12:28:51.324978 4784 generic.go:334] "Generic (PLEG): container finished" podID="1ef4cc8a-191e-4e86-9c66-77bdaeca8015" containerID="017f3291667cb26ccdaed3d1116f006c036167984eeac09932a33eeba917b25d" exitCode=0 Dec 05 12:28:51 crc kubenswrapper[4784]: I1205 12:28:51.325093 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-859cx" event={"ID":"1ef4cc8a-191e-4e86-9c66-77bdaeca8015","Type":"ContainerDied","Data":"017f3291667cb26ccdaed3d1116f006c036167984eeac09932a33eeba917b25d"} Dec 05 12:28:52 crc kubenswrapper[4784]: I1205 12:28:52.335937 4784 generic.go:334] "Generic (PLEG): container finished" podID="6d81f1d0-3e85-443a-a738-2e0d9302d327" containerID="e060f096a1f978333ef685540163907070aba3829d4ee9538807dc9d08399a7b" exitCode=0 Dec 05 12:28:52 crc kubenswrapper[4784]: I1205 12:28:52.336017 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hkkm9" event={"ID":"6d81f1d0-3e85-443a-a738-2e0d9302d327","Type":"ContainerDied","Data":"e060f096a1f978333ef685540163907070aba3829d4ee9538807dc9d08399a7b"} Dec 05 12:28:52 crc kubenswrapper[4784]: I1205 12:28:52.338717 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-859cx" event={"ID":"1ef4cc8a-191e-4e86-9c66-77bdaeca8015","Type":"ContainerStarted","Data":"18b45db220f566ebf5287f2b5f2765dca75979e31d0a18fb62f57c391dfc2285"} Dec 05 12:28:52 crc kubenswrapper[4784]: I1205 12:28:52.342560 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pd864" event={"ID":"f6a64333-7a87-462a-996f-b3ce85e43c8f","Type":"ContainerStarted","Data":"b23c758ac5a209a9f82e397181b091cbaf0525daf74352a342877dd0ec26b1d5"} Dec 05 12:28:52 crc kubenswrapper[4784]: I1205 12:28:52.376127 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-pd864" podStartSLOduration=3.224569843 podStartE2EDuration="56.376106261s" podCreationTimestamp="2025-12-05 12:27:56 +0000 UTC" firstStartedPulling="2025-12-05 12:27:57.874772257 +0000 UTC m=+157.294839072" lastFinishedPulling="2025-12-05 12:28:51.026308675 +0000 UTC m=+210.446375490" observedRunningTime="2025-12-05 12:28:52.371216023 +0000 UTC m=+211.791282839" watchObservedRunningTime="2025-12-05 12:28:52.376106261 +0000 UTC m=+211.796173066" Dec 05 12:28:52 crc kubenswrapper[4784]: I1205 12:28:52.393896 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-859cx" podStartSLOduration=2.309884969 podStartE2EDuration="56.393878805s" podCreationTimestamp="2025-12-05 12:27:56 +0000 UTC" firstStartedPulling="2025-12-05 12:27:57.869332638 +0000 UTC m=+157.289399453" lastFinishedPulling="2025-12-05 12:28:51.953326474 +0000 UTC 
m=+211.373393289" observedRunningTime="2025-12-05 12:28:52.390752069 +0000 UTC m=+211.810818884" watchObservedRunningTime="2025-12-05 12:28:52.393878805 +0000 UTC m=+211.813945620" Dec 05 12:28:53 crc kubenswrapper[4784]: I1205 12:28:53.349863 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hkkm9" event={"ID":"6d81f1d0-3e85-443a-a738-2e0d9302d327","Type":"ContainerStarted","Data":"2ef1331e31888aa89cecb3fb44bc46be65bf27fdc801d727e6596e3f734de92d"} Dec 05 12:28:53 crc kubenswrapper[4784]: I1205 12:28:53.370342 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-hkkm9" podStartSLOduration=5.625963172 podStartE2EDuration="55.370325488s" podCreationTimestamp="2025-12-05 12:27:58 +0000 UTC" firstStartedPulling="2025-12-05 12:28:02.950154078 +0000 UTC m=+162.370220903" lastFinishedPulling="2025-12-05 12:28:52.694516404 +0000 UTC m=+212.114583219" observedRunningTime="2025-12-05 12:28:53.369388065 +0000 UTC m=+212.789454890" watchObservedRunningTime="2025-12-05 12:28:53.370325488 +0000 UTC m=+212.790392303" Dec 05 12:28:56 crc kubenswrapper[4784]: I1205 12:28:56.943196 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-pd864" Dec 05 12:28:56 crc kubenswrapper[4784]: I1205 12:28:56.943729 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-pd864" Dec 05 12:28:57 crc kubenswrapper[4784]: I1205 12:28:57.299901 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-pd864" Dec 05 12:28:57 crc kubenswrapper[4784]: I1205 12:28:57.321440 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-859cx" Dec 05 12:28:57 crc kubenswrapper[4784]: I1205 12:28:57.321506 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-859cx" Dec 05 12:28:57 crc kubenswrapper[4784]: I1205 12:28:57.360398 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-859cx" Dec 05 12:28:57 crc kubenswrapper[4784]: I1205 12:28:57.410715 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-859cx" Dec 05 12:28:57 crc kubenswrapper[4784]: I1205 12:28:57.414542 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-pd864" Dec 05 12:28:58 crc kubenswrapper[4784]: I1205 12:28:58.377379 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vlvmb" event={"ID":"626019ff-24ba-4b81-b6ad-ba7c7085fa55","Type":"ContainerStarted","Data":"d35997b0b696b14377b9e345a901049390a07a8b53b0a05206a91841e23c458b"} Dec 05 12:28:58 crc kubenswrapper[4784]: I1205 12:28:58.379983 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-787vm" event={"ID":"e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8","Type":"ContainerStarted","Data":"6c63cf2b43b166598c8b91b5157e8fa3edc6eedc237be44a5a4edb99f3be15cd"} Dec 05 12:28:58 crc kubenswrapper[4784]: I1205 12:28:58.381868 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sf2pd" 
event={"ID":"be2166c8-c56d-46bd-ac93-e6eeb11ecba6","Type":"ContainerStarted","Data":"3721ab85524db2d44e3225cf74decc2e9d23150f795de83e620f182c76b8c012"} Dec 05 12:28:58 crc kubenswrapper[4784]: I1205 12:28:58.705126 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-hkkm9" Dec 05 12:28:58 crc kubenswrapper[4784]: I1205 12:28:58.705441 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-hkkm9" Dec 05 12:28:58 crc kubenswrapper[4784]: I1205 12:28:58.748264 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-hkkm9" Dec 05 12:28:59 crc kubenswrapper[4784]: I1205 12:28:59.390936 4784 generic.go:334] "Generic (PLEG): container finished" podID="2f4fce3b-5270-43f8-9a95-b9c15beb8bd1" containerID="79cfb6e2ef67c1e82b5d7e84f865fd0662dbdd326a11f610bb9b4e274409c966" exitCode=0 Dec 05 12:28:59 crc kubenswrapper[4784]: I1205 12:28:59.391010 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x9c8w" event={"ID":"2f4fce3b-5270-43f8-9a95-b9c15beb8bd1","Type":"ContainerDied","Data":"79cfb6e2ef67c1e82b5d7e84f865fd0662dbdd326a11f610bb9b4e274409c966"} Dec 05 12:28:59 crc kubenswrapper[4784]: I1205 12:28:59.394162 4784 generic.go:334] "Generic (PLEG): container finished" podID="626019ff-24ba-4b81-b6ad-ba7c7085fa55" containerID="d35997b0b696b14377b9e345a901049390a07a8b53b0a05206a91841e23c458b" exitCode=0 Dec 05 12:28:59 crc kubenswrapper[4784]: I1205 12:28:59.394304 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vlvmb" event={"ID":"626019ff-24ba-4b81-b6ad-ba7c7085fa55","Type":"ContainerDied","Data":"d35997b0b696b14377b9e345a901049390a07a8b53b0a05206a91841e23c458b"} Dec 05 12:28:59 crc kubenswrapper[4784]: I1205 12:28:59.397079 4784 generic.go:334] "Generic (PLEG): container finished" podID="e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8" containerID="6c63cf2b43b166598c8b91b5157e8fa3edc6eedc237be44a5a4edb99f3be15cd" exitCode=0 Dec 05 12:28:59 crc kubenswrapper[4784]: I1205 12:28:59.397162 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-787vm" event={"ID":"e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8","Type":"ContainerDied","Data":"6c63cf2b43b166598c8b91b5157e8fa3edc6eedc237be44a5a4edb99f3be15cd"} Dec 05 12:28:59 crc kubenswrapper[4784]: I1205 12:28:59.406247 4784 generic.go:334] "Generic (PLEG): container finished" podID="be2166c8-c56d-46bd-ac93-e6eeb11ecba6" containerID="3721ab85524db2d44e3225cf74decc2e9d23150f795de83e620f182c76b8c012" exitCode=0 Dec 05 12:28:59 crc kubenswrapper[4784]: I1205 12:28:59.406473 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sf2pd" event={"ID":"be2166c8-c56d-46bd-ac93-e6eeb11ecba6","Type":"ContainerDied","Data":"3721ab85524db2d44e3225cf74decc2e9d23150f795de83e620f182c76b8c012"} Dec 05 12:28:59 crc kubenswrapper[4784]: I1205 12:28:59.450300 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-hkkm9" Dec 05 12:28:59 crc kubenswrapper[4784]: I1205 12:28:59.573324 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" 
start-of-body= Dec 05 12:28:59 crc kubenswrapper[4784]: I1205 12:28:59.573418 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:28:59 crc kubenswrapper[4784]: I1205 12:28:59.573486 4784 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" Dec 05 12:28:59 crc kubenswrapper[4784]: I1205 12:28:59.574403 4784 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"45f31312230c7d56492145f94ee5dfda841a79b7d35cf4139b4fc09eacf070fe"} pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 12:28:59 crc kubenswrapper[4784]: I1205 12:28:59.574533 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" containerID="cri-o://45f31312230c7d56492145f94ee5dfda841a79b7d35cf4139b4fc09eacf070fe" gracePeriod=600 Dec 05 12:29:00 crc kubenswrapper[4784]: I1205 12:29:00.036717 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-859cx"] Dec 05 12:29:00 crc kubenswrapper[4784]: I1205 12:29:00.037549 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-859cx" podUID="1ef4cc8a-191e-4e86-9c66-77bdaeca8015" containerName="registry-server" containerID="cri-o://18b45db220f566ebf5287f2b5f2765dca75979e31d0a18fb62f57c391dfc2285" gracePeriod=2 Dec 05 12:29:00 crc kubenswrapper[4784]: I1205 12:29:00.414439 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-787vm" event={"ID":"e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8","Type":"ContainerStarted","Data":"8540c31507a9c4a41c615447b31e96aae992e97ab7c16629b31d402455adcfa3"} Dec 05 12:29:00 crc kubenswrapper[4784]: I1205 12:29:00.418044 4784 generic.go:334] "Generic (PLEG): container finished" podID="1ef4cc8a-191e-4e86-9c66-77bdaeca8015" containerID="18b45db220f566ebf5287f2b5f2765dca75979e31d0a18fb62f57c391dfc2285" exitCode=0 Dec 05 12:29:00 crc kubenswrapper[4784]: I1205 12:29:00.418119 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-859cx" event={"ID":"1ef4cc8a-191e-4e86-9c66-77bdaeca8015","Type":"ContainerDied","Data":"18b45db220f566ebf5287f2b5f2765dca75979e31d0a18fb62f57c391dfc2285"} Dec 05 12:29:00 crc kubenswrapper[4784]: I1205 12:29:00.422600 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sf2pd" event={"ID":"be2166c8-c56d-46bd-ac93-e6eeb11ecba6","Type":"ContainerStarted","Data":"f7a5cf2484f7c19b4ba414fc32937ef7a46ee64751934d249ab88c46b822e19d"} Dec 05 12:29:00 crc kubenswrapper[4784]: I1205 12:29:00.425369 4784 generic.go:334] "Generic (PLEG): container finished" podID="be412f31-7a36-4811-8914-be8cdc987d08" containerID="45f31312230c7d56492145f94ee5dfda841a79b7d35cf4139b4fc09eacf070fe" exitCode=0 Dec 05 12:29:00 crc kubenswrapper[4784]: I1205 12:29:00.425485 4784 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerDied","Data":"45f31312230c7d56492145f94ee5dfda841a79b7d35cf4139b4fc09eacf070fe"} Dec 05 12:29:00 crc kubenswrapper[4784]: I1205 12:29:00.425534 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerStarted","Data":"ea74219957722474619164ca157e865be958964ef6b321945fde9673ee5d6f29"} Dec 05 12:29:00 crc kubenswrapper[4784]: I1205 12:29:00.433893 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x9c8w" event={"ID":"2f4fce3b-5270-43f8-9a95-b9c15beb8bd1","Type":"ContainerStarted","Data":"17e8b8cd92943fe966f43f8b257505397919f739ea316acec8ee2c14e6da03f3"} Dec 05 12:29:00 crc kubenswrapper[4784]: I1205 12:29:00.436174 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-859cx" Dec 05 12:29:00 crc kubenswrapper[4784]: I1205 12:29:00.436949 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5gkgh" event={"ID":"31ce912b-e8c6-4b90-b8ed-d2051dce1232","Type":"ContainerStarted","Data":"f34482c5d65a7fda67646fe0f7c2ce10f444dbd5c7b2eb24164a74d9df64883a"} Dec 05 12:29:00 crc kubenswrapper[4784]: I1205 12:29:00.443376 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-787vm" podStartSLOduration=2.453971779 podStartE2EDuration="1m4.44335878s" podCreationTimestamp="2025-12-05 12:27:56 +0000 UTC" firstStartedPulling="2025-12-05 12:27:57.871932318 +0000 UTC m=+157.291999133" lastFinishedPulling="2025-12-05 12:28:59.861319319 +0000 UTC m=+219.281386134" observedRunningTime="2025-12-05 12:29:00.438048569 +0000 UTC m=+219.858115404" watchObservedRunningTime="2025-12-05 12:29:00.44335878 +0000 UTC m=+219.863425585" Dec 05 12:29:00 crc kubenswrapper[4784]: I1205 12:29:00.446680 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vlvmb" event={"ID":"626019ff-24ba-4b81-b6ad-ba7c7085fa55","Type":"ContainerStarted","Data":"14a7bac8fa2bc9b62f356d65c56b04451bf492b1a49ddbf84785cb8d725eff75"} Dec 05 12:29:00 crc kubenswrapper[4784]: I1205 12:29:00.515881 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-sf2pd" podStartSLOduration=2.586839327 podStartE2EDuration="1m4.515864019s" podCreationTimestamp="2025-12-05 12:27:56 +0000 UTC" firstStartedPulling="2025-12-05 12:27:57.877328886 +0000 UTC m=+157.297395701" lastFinishedPulling="2025-12-05 12:28:59.806353578 +0000 UTC m=+219.226420393" observedRunningTime="2025-12-05 12:29:00.511677636 +0000 UTC m=+219.931744461" watchObservedRunningTime="2025-12-05 12:29:00.515864019 +0000 UTC m=+219.935930834" Dec 05 12:29:00 crc kubenswrapper[4784]: I1205 12:29:00.545243 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tj8js\" (UniqueName: \"kubernetes.io/projected/1ef4cc8a-191e-4e86-9c66-77bdaeca8015-kube-api-access-tj8js\") pod \"1ef4cc8a-191e-4e86-9c66-77bdaeca8015\" (UID: \"1ef4cc8a-191e-4e86-9c66-77bdaeca8015\") " Dec 05 12:29:00 crc kubenswrapper[4784]: I1205 12:29:00.545313 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/1ef4cc8a-191e-4e86-9c66-77bdaeca8015-catalog-content\") pod \"1ef4cc8a-191e-4e86-9c66-77bdaeca8015\" (UID: \"1ef4cc8a-191e-4e86-9c66-77bdaeca8015\") " Dec 05 12:29:00 crc kubenswrapper[4784]: I1205 12:29:00.545333 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ef4cc8a-191e-4e86-9c66-77bdaeca8015-utilities\") pod \"1ef4cc8a-191e-4e86-9c66-77bdaeca8015\" (UID: \"1ef4cc8a-191e-4e86-9c66-77bdaeca8015\") " Dec 05 12:29:00 crc kubenswrapper[4784]: I1205 12:29:00.546321 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1ef4cc8a-191e-4e86-9c66-77bdaeca8015-utilities" (OuterVolumeSpecName: "utilities") pod "1ef4cc8a-191e-4e86-9c66-77bdaeca8015" (UID: "1ef4cc8a-191e-4e86-9c66-77bdaeca8015"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:29:00 crc kubenswrapper[4784]: I1205 12:29:00.550026 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-x9c8w" podStartSLOduration=5.551239402 podStartE2EDuration="1m2.550006721s" podCreationTimestamp="2025-12-05 12:27:58 +0000 UTC" firstStartedPulling="2025-12-05 12:28:02.942819643 +0000 UTC m=+162.362886458" lastFinishedPulling="2025-12-05 12:28:59.941586962 +0000 UTC m=+219.361653777" observedRunningTime="2025-12-05 12:29:00.546135299 +0000 UTC m=+219.966202124" watchObservedRunningTime="2025-12-05 12:29:00.550006721 +0000 UTC m=+219.970073536" Dec 05 12:29:00 crc kubenswrapper[4784]: I1205 12:29:00.556400 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ef4cc8a-191e-4e86-9c66-77bdaeca8015-kube-api-access-tj8js" (OuterVolumeSpecName: "kube-api-access-tj8js") pod "1ef4cc8a-191e-4e86-9c66-77bdaeca8015" (UID: "1ef4cc8a-191e-4e86-9c66-77bdaeca8015"). InnerVolumeSpecName "kube-api-access-tj8js". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:29:00 crc kubenswrapper[4784]: I1205 12:29:00.565097 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-vlvmb" podStartSLOduration=5.730828911 podStartE2EDuration="1m1.565079355s" podCreationTimestamp="2025-12-05 12:27:59 +0000 UTC" firstStartedPulling="2025-12-05 12:28:04.011202055 +0000 UTC m=+163.431268870" lastFinishedPulling="2025-12-05 12:28:59.845452499 +0000 UTC m=+219.265519314" observedRunningTime="2025-12-05 12:29:00.563719778 +0000 UTC m=+219.983786603" watchObservedRunningTime="2025-12-05 12:29:00.565079355 +0000 UTC m=+219.985146170" Dec 05 12:29:00 crc kubenswrapper[4784]: I1205 12:29:00.617264 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1ef4cc8a-191e-4e86-9c66-77bdaeca8015-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1ef4cc8a-191e-4e86-9c66-77bdaeca8015" (UID: "1ef4cc8a-191e-4e86-9c66-77bdaeca8015"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:29:00 crc kubenswrapper[4784]: I1205 12:29:00.646406 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tj8js\" (UniqueName: \"kubernetes.io/projected/1ef4cc8a-191e-4e86-9c66-77bdaeca8015-kube-api-access-tj8js\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:00 crc kubenswrapper[4784]: I1205 12:29:00.646444 4784 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ef4cc8a-191e-4e86-9c66-77bdaeca8015-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:00 crc kubenswrapper[4784]: I1205 12:29:00.646454 4784 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ef4cc8a-191e-4e86-9c66-77bdaeca8015-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:01 crc kubenswrapper[4784]: I1205 12:29:01.455165 4784 generic.go:334] "Generic (PLEG): container finished" podID="31ce912b-e8c6-4b90-b8ed-d2051dce1232" containerID="f34482c5d65a7fda67646fe0f7c2ce10f444dbd5c7b2eb24164a74d9df64883a" exitCode=0 Dec 05 12:29:01 crc kubenswrapper[4784]: I1205 12:29:01.455260 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5gkgh" event={"ID":"31ce912b-e8c6-4b90-b8ed-d2051dce1232","Type":"ContainerDied","Data":"f34482c5d65a7fda67646fe0f7c2ce10f444dbd5c7b2eb24164a74d9df64883a"} Dec 05 12:29:01 crc kubenswrapper[4784]: I1205 12:29:01.458339 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-859cx" event={"ID":"1ef4cc8a-191e-4e86-9c66-77bdaeca8015","Type":"ContainerDied","Data":"c1c6df859a67784e946fa7bb46b83711054eee0b89f606efda1a238cac02bcfb"} Dec 05 12:29:01 crc kubenswrapper[4784]: I1205 12:29:01.458402 4784 scope.go:117] "RemoveContainer" containerID="18b45db220f566ebf5287f2b5f2765dca75979e31d0a18fb62f57c391dfc2285" Dec 05 12:29:01 crc kubenswrapper[4784]: I1205 12:29:01.458526 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-859cx" Dec 05 12:29:01 crc kubenswrapper[4784]: I1205 12:29:01.480418 4784 scope.go:117] "RemoveContainer" containerID="017f3291667cb26ccdaed3d1116f006c036167984eeac09932a33eeba917b25d" Dec 05 12:29:01 crc kubenswrapper[4784]: I1205 12:29:01.497223 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-859cx"] Dec 05 12:29:01 crc kubenswrapper[4784]: I1205 12:29:01.505738 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-859cx"] Dec 05 12:29:01 crc kubenswrapper[4784]: I1205 12:29:01.511129 4784 scope.go:117] "RemoveContainer" containerID="7ad4df051fbef0e385c842f16be985a7cab5b3f8fe70a93f28eb6f709d5d5c67" Dec 05 12:29:02 crc kubenswrapper[4784]: I1205 12:29:02.471964 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5gkgh" event={"ID":"31ce912b-e8c6-4b90-b8ed-d2051dce1232","Type":"ContainerStarted","Data":"bec9e4c30f49ac133c09ada34f5fab04643eae2ca6eec5ec0fc4115963f5848c"} Dec 05 12:29:02 crc kubenswrapper[4784]: I1205 12:29:02.497550 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-5gkgh" podStartSLOduration=5.647663665 podStartE2EDuration="1m3.497531691s" podCreationTimestamp="2025-12-05 12:27:59 +0000 UTC" firstStartedPulling="2025-12-05 12:28:04.004815294 +0000 UTC m=+163.424882109" lastFinishedPulling="2025-12-05 12:29:01.85468332 +0000 UTC m=+221.274750135" observedRunningTime="2025-12-05 12:29:02.495683609 +0000 UTC m=+221.915750434" watchObservedRunningTime="2025-12-05 12:29:02.497531691 +0000 UTC m=+221.917598506" Dec 05 12:29:03 crc kubenswrapper[4784]: I1205 12:29:03.007513 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1ef4cc8a-191e-4e86-9c66-77bdaeca8015" path="/var/lib/kubelet/pods/1ef4cc8a-191e-4e86-9c66-77bdaeca8015/volumes" Dec 05 12:29:06 crc kubenswrapper[4784]: I1205 12:29:06.739838 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-sf2pd" Dec 05 12:29:06 crc kubenswrapper[4784]: I1205 12:29:06.740371 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-sf2pd" Dec 05 12:29:06 crc kubenswrapper[4784]: I1205 12:29:06.780406 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-sf2pd" Dec 05 12:29:07 crc kubenswrapper[4784]: I1205 12:29:07.130007 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-787vm" Dec 05 12:29:07 crc kubenswrapper[4784]: I1205 12:29:07.130058 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-787vm" Dec 05 12:29:07 crc kubenswrapper[4784]: I1205 12:29:07.175626 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-787vm" Dec 05 12:29:07 crc kubenswrapper[4784]: I1205 12:29:07.539214 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-sf2pd" Dec 05 12:29:07 crc kubenswrapper[4784]: I1205 12:29:07.540817 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-787vm" Dec 05 12:29:08 crc kubenswrapper[4784]: I1205 
12:29:08.431184 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-787vm"] Dec 05 12:29:09 crc kubenswrapper[4784]: I1205 12:29:09.128659 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-x9c8w" Dec 05 12:29:09 crc kubenswrapper[4784]: I1205 12:29:09.128714 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-x9c8w" Dec 05 12:29:09 crc kubenswrapper[4784]: I1205 12:29:09.169649 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-x9c8w" Dec 05 12:29:09 crc kubenswrapper[4784]: I1205 12:29:09.511845 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-787vm" podUID="e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8" containerName="registry-server" containerID="cri-o://8540c31507a9c4a41c615447b31e96aae992e97ab7c16629b31d402455adcfa3" gracePeriod=2 Dec 05 12:29:09 crc kubenswrapper[4784]: I1205 12:29:09.549414 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-x9c8w" Dec 05 12:29:09 crc kubenswrapper[4784]: I1205 12:29:09.916650 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-vlvmb" Dec 05 12:29:09 crc kubenswrapper[4784]: I1205 12:29:09.916703 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-vlvmb" Dec 05 12:29:09 crc kubenswrapper[4784]: I1205 12:29:09.955271 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-vlvmb" Dec 05 12:29:10 crc kubenswrapper[4784]: I1205 12:29:10.306134 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-5gkgh" Dec 05 12:29:10 crc kubenswrapper[4784]: I1205 12:29:10.306235 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-5gkgh" Dec 05 12:29:10 crc kubenswrapper[4784]: I1205 12:29:10.347839 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-5gkgh" Dec 05 12:29:10 crc kubenswrapper[4784]: I1205 12:29:10.557134 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-5gkgh" Dec 05 12:29:10 crc kubenswrapper[4784]: I1205 12:29:10.557554 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-vlvmb" Dec 05 12:29:12 crc kubenswrapper[4784]: I1205 12:29:12.157280 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-qmqnr"] Dec 05 12:29:12 crc kubenswrapper[4784]: E1205 12:29:12.157808 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ef4cc8a-191e-4e86-9c66-77bdaeca8015" containerName="registry-server" Dec 05 12:29:12 crc kubenswrapper[4784]: I1205 12:29:12.157822 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ef4cc8a-191e-4e86-9c66-77bdaeca8015" containerName="registry-server" Dec 05 12:29:12 crc kubenswrapper[4784]: E1205 12:29:12.157834 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ef4cc8a-191e-4e86-9c66-77bdaeca8015" containerName="extract-utilities" Dec 05 12:29:12 crc 
kubenswrapper[4784]: I1205 12:29:12.157842 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ef4cc8a-191e-4e86-9c66-77bdaeca8015" containerName="extract-utilities" Dec 05 12:29:12 crc kubenswrapper[4784]: E1205 12:29:12.157854 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ef4cc8a-191e-4e86-9c66-77bdaeca8015" containerName="extract-content" Dec 05 12:29:12 crc kubenswrapper[4784]: I1205 12:29:12.157861 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ef4cc8a-191e-4e86-9c66-77bdaeca8015" containerName="extract-content" Dec 05 12:29:12 crc kubenswrapper[4784]: E1205 12:29:12.157871 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87ee6b9e-80b7-41da-9f5c-980c8a27bea5" containerName="pruner" Dec 05 12:29:12 crc kubenswrapper[4784]: I1205 12:29:12.157877 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="87ee6b9e-80b7-41da-9f5c-980c8a27bea5" containerName="pruner" Dec 05 12:29:12 crc kubenswrapper[4784]: I1205 12:29:12.157988 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="87ee6b9e-80b7-41da-9f5c-980c8a27bea5" containerName="pruner" Dec 05 12:29:12 crc kubenswrapper[4784]: I1205 12:29:12.158015 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ef4cc8a-191e-4e86-9c66-77bdaeca8015" containerName="registry-server" Dec 05 12:29:12 crc kubenswrapper[4784]: I1205 12:29:12.158501 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-qmqnr" Dec 05 12:29:12 crc kubenswrapper[4784]: I1205 12:29:12.191424 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-qmqnr"] Dec 05 12:29:12 crc kubenswrapper[4784]: I1205 12:29:12.299324 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/59d0659e-87ef-4aad-b969-d841641f2e3e-registry-certificates\") pod \"image-registry-66df7c8f76-qmqnr\" (UID: \"59d0659e-87ef-4aad-b969-d841641f2e3e\") " pod="openshift-image-registry/image-registry-66df7c8f76-qmqnr" Dec 05 12:29:12 crc kubenswrapper[4784]: I1205 12:29:12.299397 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7spl\" (UniqueName: \"kubernetes.io/projected/59d0659e-87ef-4aad-b969-d841641f2e3e-kube-api-access-x7spl\") pod \"image-registry-66df7c8f76-qmqnr\" (UID: \"59d0659e-87ef-4aad-b969-d841641f2e3e\") " pod="openshift-image-registry/image-registry-66df7c8f76-qmqnr" Dec 05 12:29:12 crc kubenswrapper[4784]: I1205 12:29:12.299437 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/59d0659e-87ef-4aad-b969-d841641f2e3e-bound-sa-token\") pod \"image-registry-66df7c8f76-qmqnr\" (UID: \"59d0659e-87ef-4aad-b969-d841641f2e3e\") " pod="openshift-image-registry/image-registry-66df7c8f76-qmqnr" Dec 05 12:29:12 crc kubenswrapper[4784]: I1205 12:29:12.299475 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-qmqnr\" (UID: \"59d0659e-87ef-4aad-b969-d841641f2e3e\") " pod="openshift-image-registry/image-registry-66df7c8f76-qmqnr" Dec 05 12:29:12 crc kubenswrapper[4784]: I1205 
12:29:12.299496 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/59d0659e-87ef-4aad-b969-d841641f2e3e-ca-trust-extracted\") pod \"image-registry-66df7c8f76-qmqnr\" (UID: \"59d0659e-87ef-4aad-b969-d841641f2e3e\") " pod="openshift-image-registry/image-registry-66df7c8f76-qmqnr" Dec 05 12:29:12 crc kubenswrapper[4784]: I1205 12:29:12.299524 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/59d0659e-87ef-4aad-b969-d841641f2e3e-installation-pull-secrets\") pod \"image-registry-66df7c8f76-qmqnr\" (UID: \"59d0659e-87ef-4aad-b969-d841641f2e3e\") " pod="openshift-image-registry/image-registry-66df7c8f76-qmqnr" Dec 05 12:29:12 crc kubenswrapper[4784]: I1205 12:29:12.299568 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/59d0659e-87ef-4aad-b969-d841641f2e3e-registry-tls\") pod \"image-registry-66df7c8f76-qmqnr\" (UID: \"59d0659e-87ef-4aad-b969-d841641f2e3e\") " pod="openshift-image-registry/image-registry-66df7c8f76-qmqnr" Dec 05 12:29:12 crc kubenswrapper[4784]: I1205 12:29:12.299611 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/59d0659e-87ef-4aad-b969-d841641f2e3e-trusted-ca\") pod \"image-registry-66df7c8f76-qmqnr\" (UID: \"59d0659e-87ef-4aad-b969-d841641f2e3e\") " pod="openshift-image-registry/image-registry-66df7c8f76-qmqnr" Dec 05 12:29:12 crc kubenswrapper[4784]: I1205 12:29:12.319515 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-qmqnr\" (UID: \"59d0659e-87ef-4aad-b969-d841641f2e3e\") " pod="openshift-image-registry/image-registry-66df7c8f76-qmqnr" Dec 05 12:29:12 crc kubenswrapper[4784]: I1205 12:29:12.401235 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/59d0659e-87ef-4aad-b969-d841641f2e3e-registry-tls\") pod \"image-registry-66df7c8f76-qmqnr\" (UID: \"59d0659e-87ef-4aad-b969-d841641f2e3e\") " pod="openshift-image-registry/image-registry-66df7c8f76-qmqnr" Dec 05 12:29:12 crc kubenswrapper[4784]: I1205 12:29:12.401580 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/59d0659e-87ef-4aad-b969-d841641f2e3e-trusted-ca\") pod \"image-registry-66df7c8f76-qmqnr\" (UID: \"59d0659e-87ef-4aad-b969-d841641f2e3e\") " pod="openshift-image-registry/image-registry-66df7c8f76-qmqnr" Dec 05 12:29:12 crc kubenswrapper[4784]: I1205 12:29:12.401693 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/59d0659e-87ef-4aad-b969-d841641f2e3e-registry-certificates\") pod \"image-registry-66df7c8f76-qmqnr\" (UID: \"59d0659e-87ef-4aad-b969-d841641f2e3e\") " pod="openshift-image-registry/image-registry-66df7c8f76-qmqnr" Dec 05 12:29:12 crc kubenswrapper[4784]: I1205 12:29:12.401790 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7spl\" (UniqueName: 
\"kubernetes.io/projected/59d0659e-87ef-4aad-b969-d841641f2e3e-kube-api-access-x7spl\") pod \"image-registry-66df7c8f76-qmqnr\" (UID: \"59d0659e-87ef-4aad-b969-d841641f2e3e\") " pod="openshift-image-registry/image-registry-66df7c8f76-qmqnr" Dec 05 12:29:12 crc kubenswrapper[4784]: I1205 12:29:12.401883 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/59d0659e-87ef-4aad-b969-d841641f2e3e-bound-sa-token\") pod \"image-registry-66df7c8f76-qmqnr\" (UID: \"59d0659e-87ef-4aad-b969-d841641f2e3e\") " pod="openshift-image-registry/image-registry-66df7c8f76-qmqnr" Dec 05 12:29:12 crc kubenswrapper[4784]: I1205 12:29:12.401981 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/59d0659e-87ef-4aad-b969-d841641f2e3e-ca-trust-extracted\") pod \"image-registry-66df7c8f76-qmqnr\" (UID: \"59d0659e-87ef-4aad-b969-d841641f2e3e\") " pod="openshift-image-registry/image-registry-66df7c8f76-qmqnr" Dec 05 12:29:12 crc kubenswrapper[4784]: I1205 12:29:12.402063 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/59d0659e-87ef-4aad-b969-d841641f2e3e-installation-pull-secrets\") pod \"image-registry-66df7c8f76-qmqnr\" (UID: \"59d0659e-87ef-4aad-b969-d841641f2e3e\") " pod="openshift-image-registry/image-registry-66df7c8f76-qmqnr" Dec 05 12:29:12 crc kubenswrapper[4784]: I1205 12:29:12.402464 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/59d0659e-87ef-4aad-b969-d841641f2e3e-ca-trust-extracted\") pod \"image-registry-66df7c8f76-qmqnr\" (UID: \"59d0659e-87ef-4aad-b969-d841641f2e3e\") " pod="openshift-image-registry/image-registry-66df7c8f76-qmqnr" Dec 05 12:29:12 crc kubenswrapper[4784]: I1205 12:29:12.403155 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/59d0659e-87ef-4aad-b969-d841641f2e3e-registry-certificates\") pod \"image-registry-66df7c8f76-qmqnr\" (UID: \"59d0659e-87ef-4aad-b969-d841641f2e3e\") " pod="openshift-image-registry/image-registry-66df7c8f76-qmqnr" Dec 05 12:29:12 crc kubenswrapper[4784]: I1205 12:29:12.403430 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/59d0659e-87ef-4aad-b969-d841641f2e3e-trusted-ca\") pod \"image-registry-66df7c8f76-qmqnr\" (UID: \"59d0659e-87ef-4aad-b969-d841641f2e3e\") " pod="openshift-image-registry/image-registry-66df7c8f76-qmqnr" Dec 05 12:29:12 crc kubenswrapper[4784]: I1205 12:29:12.407118 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/59d0659e-87ef-4aad-b969-d841641f2e3e-registry-tls\") pod \"image-registry-66df7c8f76-qmqnr\" (UID: \"59d0659e-87ef-4aad-b969-d841641f2e3e\") " pod="openshift-image-registry/image-registry-66df7c8f76-qmqnr" Dec 05 12:29:12 crc kubenswrapper[4784]: I1205 12:29:12.412269 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/59d0659e-87ef-4aad-b969-d841641f2e3e-installation-pull-secrets\") pod \"image-registry-66df7c8f76-qmqnr\" (UID: \"59d0659e-87ef-4aad-b969-d841641f2e3e\") " pod="openshift-image-registry/image-registry-66df7c8f76-qmqnr" Dec 05 12:29:12 
crc kubenswrapper[4784]: I1205 12:29:12.417809 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/59d0659e-87ef-4aad-b969-d841641f2e3e-bound-sa-token\") pod \"image-registry-66df7c8f76-qmqnr\" (UID: \"59d0659e-87ef-4aad-b969-d841641f2e3e\") " pod="openshift-image-registry/image-registry-66df7c8f76-qmqnr" Dec 05 12:29:12 crc kubenswrapper[4784]: I1205 12:29:12.417954 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7spl\" (UniqueName: \"kubernetes.io/projected/59d0659e-87ef-4aad-b969-d841641f2e3e-kube-api-access-x7spl\") pod \"image-registry-66df7c8f76-qmqnr\" (UID: \"59d0659e-87ef-4aad-b969-d841641f2e3e\") " pod="openshift-image-registry/image-registry-66df7c8f76-qmqnr" Dec 05 12:29:12 crc kubenswrapper[4784]: I1205 12:29:12.473585 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-qmqnr" Dec 05 12:29:12 crc kubenswrapper[4784]: I1205 12:29:12.833030 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-x9c8w"] Dec 05 12:29:12 crc kubenswrapper[4784]: I1205 12:29:12.833786 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-x9c8w" podUID="2f4fce3b-5270-43f8-9a95-b9c15beb8bd1" containerName="registry-server" containerID="cri-o://17e8b8cd92943fe966f43f8b257505397919f739ea316acec8ee2c14e6da03f3" gracePeriod=2 Dec 05 12:29:12 crc kubenswrapper[4784]: I1205 12:29:12.866954 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-qmqnr"] Dec 05 12:29:12 crc kubenswrapper[4784]: W1205 12:29:12.874740 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod59d0659e_87ef_4aad_b969_d841641f2e3e.slice/crio-6d1f677d9295b4dd0a63a4abd2b0d6d82416697b949519d14556515649507eac WatchSource:0}: Error finding container 6d1f677d9295b4dd0a63a4abd2b0d6d82416697b949519d14556515649507eac: Status 404 returned error can't find the container with id 6d1f677d9295b4dd0a63a4abd2b0d6d82416697b949519d14556515649507eac Dec 05 12:29:13 crc kubenswrapper[4784]: I1205 12:29:13.032589 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5gkgh"] Dec 05 12:29:13 crc kubenswrapper[4784]: I1205 12:29:13.033168 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-5gkgh" podUID="31ce912b-e8c6-4b90-b8ed-d2051dce1232" containerName="registry-server" containerID="cri-o://bec9e4c30f49ac133c09ada34f5fab04643eae2ca6eec5ec0fc4115963f5848c" gracePeriod=2 Dec 05 12:29:13 crc kubenswrapper[4784]: I1205 12:29:13.533950 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-qmqnr" event={"ID":"59d0659e-87ef-4aad-b969-d841641f2e3e","Type":"ContainerStarted","Data":"6d1f677d9295b4dd0a63a4abd2b0d6d82416697b949519d14556515649507eac"} Dec 05 12:29:13 crc kubenswrapper[4784]: I1205 12:29:13.535837 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-787vm_e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8/registry-server/0.log" Dec 05 12:29:13 crc kubenswrapper[4784]: I1205 12:29:13.536940 4784 generic.go:334] "Generic (PLEG): container finished" podID="e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8" 
containerID="8540c31507a9c4a41c615447b31e96aae992e97ab7c16629b31d402455adcfa3" exitCode=137 Dec 05 12:29:13 crc kubenswrapper[4784]: I1205 12:29:13.536987 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-787vm" event={"ID":"e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8","Type":"ContainerDied","Data":"8540c31507a9c4a41c615447b31e96aae992e97ab7c16629b31d402455adcfa3"} Dec 05 12:29:14 crc kubenswrapper[4784]: I1205 12:29:14.049433 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-787vm_e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8/registry-server/0.log" Dec 05 12:29:14 crc kubenswrapper[4784]: I1205 12:29:14.050387 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-787vm" Dec 05 12:29:14 crc kubenswrapper[4784]: I1205 12:29:14.127236 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8-utilities\") pod \"e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8\" (UID: \"e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8\") " Dec 05 12:29:14 crc kubenswrapper[4784]: I1205 12:29:14.127338 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8-catalog-content\") pod \"e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8\" (UID: \"e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8\") " Dec 05 12:29:14 crc kubenswrapper[4784]: I1205 12:29:14.127377 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ml2jk\" (UniqueName: \"kubernetes.io/projected/e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8-kube-api-access-ml2jk\") pod \"e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8\" (UID: \"e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8\") " Dec 05 12:29:14 crc kubenswrapper[4784]: I1205 12:29:14.129635 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8-utilities" (OuterVolumeSpecName: "utilities") pod "e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8" (UID: "e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:29:14 crc kubenswrapper[4784]: I1205 12:29:14.146919 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8-kube-api-access-ml2jk" (OuterVolumeSpecName: "kube-api-access-ml2jk") pod "e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8" (UID: "e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8"). InnerVolumeSpecName "kube-api-access-ml2jk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:29:14 crc kubenswrapper[4784]: I1205 12:29:14.218804 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8" (UID: "e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:29:14 crc kubenswrapper[4784]: I1205 12:29:14.228712 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ml2jk\" (UniqueName: \"kubernetes.io/projected/e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8-kube-api-access-ml2jk\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:14 crc kubenswrapper[4784]: I1205 12:29:14.228751 4784 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:14 crc kubenswrapper[4784]: I1205 12:29:14.228768 4784 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:14 crc kubenswrapper[4784]: I1205 12:29:14.543691 4784 generic.go:334] "Generic (PLEG): container finished" podID="31ce912b-e8c6-4b90-b8ed-d2051dce1232" containerID="bec9e4c30f49ac133c09ada34f5fab04643eae2ca6eec5ec0fc4115963f5848c" exitCode=0 Dec 05 12:29:14 crc kubenswrapper[4784]: I1205 12:29:14.543758 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5gkgh" event={"ID":"31ce912b-e8c6-4b90-b8ed-d2051dce1232","Type":"ContainerDied","Data":"bec9e4c30f49ac133c09ada34f5fab04643eae2ca6eec5ec0fc4115963f5848c"} Dec 05 12:29:14 crc kubenswrapper[4784]: I1205 12:29:14.545124 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-qmqnr" event={"ID":"59d0659e-87ef-4aad-b969-d841641f2e3e","Type":"ContainerStarted","Data":"dfd96d2933cd07bf1e64eed03e1223a31ca0935df0052f4743947efe4c245eb9"} Dec 05 12:29:14 crc kubenswrapper[4784]: I1205 12:29:14.545197 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-qmqnr" Dec 05 12:29:14 crc kubenswrapper[4784]: I1205 12:29:14.546795 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-787vm_e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8/registry-server/0.log" Dec 05 12:29:14 crc kubenswrapper[4784]: I1205 12:29:14.547412 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-787vm" event={"ID":"e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8","Type":"ContainerDied","Data":"79e0034c0288b83f255e99a2b16509bab6cd20ef90cc9b875878cce41e0a4a1b"} Dec 05 12:29:14 crc kubenswrapper[4784]: I1205 12:29:14.547437 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-787vm" Dec 05 12:29:14 crc kubenswrapper[4784]: I1205 12:29:14.547473 4784 scope.go:117] "RemoveContainer" containerID="8540c31507a9c4a41c615447b31e96aae992e97ab7c16629b31d402455adcfa3" Dec 05 12:29:14 crc kubenswrapper[4784]: I1205 12:29:14.550062 4784 generic.go:334] "Generic (PLEG): container finished" podID="2f4fce3b-5270-43f8-9a95-b9c15beb8bd1" containerID="17e8b8cd92943fe966f43f8b257505397919f739ea316acec8ee2c14e6da03f3" exitCode=0 Dec 05 12:29:14 crc kubenswrapper[4784]: I1205 12:29:14.550107 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x9c8w" event={"ID":"2f4fce3b-5270-43f8-9a95-b9c15beb8bd1","Type":"ContainerDied","Data":"17e8b8cd92943fe966f43f8b257505397919f739ea316acec8ee2c14e6da03f3"} Dec 05 12:29:14 crc kubenswrapper[4784]: I1205 12:29:14.561897 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-qmqnr" podStartSLOduration=2.561877925 podStartE2EDuration="2.561877925s" podCreationTimestamp="2025-12-05 12:29:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:29:14.56086196 +0000 UTC m=+233.980928785" watchObservedRunningTime="2025-12-05 12:29:14.561877925 +0000 UTC m=+233.981944740" Dec 05 12:29:14 crc kubenswrapper[4784]: I1205 12:29:14.569768 4784 scope.go:117] "RemoveContainer" containerID="6c63cf2b43b166598c8b91b5157e8fa3edc6eedc237be44a5a4edb99f3be15cd" Dec 05 12:29:14 crc kubenswrapper[4784]: I1205 12:29:14.586733 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-787vm"] Dec 05 12:29:14 crc kubenswrapper[4784]: I1205 12:29:14.589387 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-787vm"] Dec 05 12:29:14 crc kubenswrapper[4784]: I1205 12:29:14.606760 4784 scope.go:117] "RemoveContainer" containerID="711e8408b9095c73eb740204557a1fd10b7364c23c85d7c08cd4cc93476b63b1" Dec 05 12:29:14 crc kubenswrapper[4784]: I1205 12:29:14.869697 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-x9c8w" Dec 05 12:29:14 crc kubenswrapper[4784]: I1205 12:29:14.936717 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f4fce3b-5270-43f8-9a95-b9c15beb8bd1-utilities\") pod \"2f4fce3b-5270-43f8-9a95-b9c15beb8bd1\" (UID: \"2f4fce3b-5270-43f8-9a95-b9c15beb8bd1\") " Dec 05 12:29:14 crc kubenswrapper[4784]: I1205 12:29:14.936789 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nvbr6\" (UniqueName: \"kubernetes.io/projected/2f4fce3b-5270-43f8-9a95-b9c15beb8bd1-kube-api-access-nvbr6\") pod \"2f4fce3b-5270-43f8-9a95-b9c15beb8bd1\" (UID: \"2f4fce3b-5270-43f8-9a95-b9c15beb8bd1\") " Dec 05 12:29:14 crc kubenswrapper[4784]: I1205 12:29:14.936837 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f4fce3b-5270-43f8-9a95-b9c15beb8bd1-catalog-content\") pod \"2f4fce3b-5270-43f8-9a95-b9c15beb8bd1\" (UID: \"2f4fce3b-5270-43f8-9a95-b9c15beb8bd1\") " Dec 05 12:29:14 crc kubenswrapper[4784]: I1205 12:29:14.937800 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2f4fce3b-5270-43f8-9a95-b9c15beb8bd1-utilities" (OuterVolumeSpecName: "utilities") pod "2f4fce3b-5270-43f8-9a95-b9c15beb8bd1" (UID: "2f4fce3b-5270-43f8-9a95-b9c15beb8bd1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:29:14 crc kubenswrapper[4784]: I1205 12:29:14.940720 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f4fce3b-5270-43f8-9a95-b9c15beb8bd1-kube-api-access-nvbr6" (OuterVolumeSpecName: "kube-api-access-nvbr6") pod "2f4fce3b-5270-43f8-9a95-b9c15beb8bd1" (UID: "2f4fce3b-5270-43f8-9a95-b9c15beb8bd1"). InnerVolumeSpecName "kube-api-access-nvbr6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:29:14 crc kubenswrapper[4784]: I1205 12:29:14.957248 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2f4fce3b-5270-43f8-9a95-b9c15beb8bd1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2f4fce3b-5270-43f8-9a95-b9c15beb8bd1" (UID: "2f4fce3b-5270-43f8-9a95-b9c15beb8bd1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:29:15 crc kubenswrapper[4784]: I1205 12:29:15.016258 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8" path="/var/lib/kubelet/pods/e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8/volumes" Dec 05 12:29:15 crc kubenswrapper[4784]: I1205 12:29:15.038178 4784 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f4fce3b-5270-43f8-9a95-b9c15beb8bd1-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:15 crc kubenswrapper[4784]: I1205 12:29:15.038219 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nvbr6\" (UniqueName: \"kubernetes.io/projected/2f4fce3b-5270-43f8-9a95-b9c15beb8bd1-kube-api-access-nvbr6\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:15 crc kubenswrapper[4784]: I1205 12:29:15.038229 4784 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f4fce3b-5270-43f8-9a95-b9c15beb8bd1-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:15 crc kubenswrapper[4784]: I1205 12:29:15.042895 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5gkgh" Dec 05 12:29:15 crc kubenswrapper[4784]: I1205 12:29:15.138824 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7562b\" (UniqueName: \"kubernetes.io/projected/31ce912b-e8c6-4b90-b8ed-d2051dce1232-kube-api-access-7562b\") pod \"31ce912b-e8c6-4b90-b8ed-d2051dce1232\" (UID: \"31ce912b-e8c6-4b90-b8ed-d2051dce1232\") " Dec 05 12:29:15 crc kubenswrapper[4784]: I1205 12:29:15.138928 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31ce912b-e8c6-4b90-b8ed-d2051dce1232-catalog-content\") pod \"31ce912b-e8c6-4b90-b8ed-d2051dce1232\" (UID: \"31ce912b-e8c6-4b90-b8ed-d2051dce1232\") " Dec 05 12:29:15 crc kubenswrapper[4784]: I1205 12:29:15.138977 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31ce912b-e8c6-4b90-b8ed-d2051dce1232-utilities\") pod \"31ce912b-e8c6-4b90-b8ed-d2051dce1232\" (UID: \"31ce912b-e8c6-4b90-b8ed-d2051dce1232\") " Dec 05 12:29:15 crc kubenswrapper[4784]: I1205 12:29:15.139967 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/31ce912b-e8c6-4b90-b8ed-d2051dce1232-utilities" (OuterVolumeSpecName: "utilities") pod "31ce912b-e8c6-4b90-b8ed-d2051dce1232" (UID: "31ce912b-e8c6-4b90-b8ed-d2051dce1232"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:29:15 crc kubenswrapper[4784]: I1205 12:29:15.142294 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31ce912b-e8c6-4b90-b8ed-d2051dce1232-kube-api-access-7562b" (OuterVolumeSpecName: "kube-api-access-7562b") pod "31ce912b-e8c6-4b90-b8ed-d2051dce1232" (UID: "31ce912b-e8c6-4b90-b8ed-d2051dce1232"). InnerVolumeSpecName "kube-api-access-7562b". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:29:15 crc kubenswrapper[4784]: I1205 12:29:15.240648 4784 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31ce912b-e8c6-4b90-b8ed-d2051dce1232-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:15 crc kubenswrapper[4784]: I1205 12:29:15.240692 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7562b\" (UniqueName: \"kubernetes.io/projected/31ce912b-e8c6-4b90-b8ed-d2051dce1232-kube-api-access-7562b\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:15 crc kubenswrapper[4784]: I1205 12:29:15.281927 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/31ce912b-e8c6-4b90-b8ed-d2051dce1232-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "31ce912b-e8c6-4b90-b8ed-d2051dce1232" (UID: "31ce912b-e8c6-4b90-b8ed-d2051dce1232"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:29:15 crc kubenswrapper[4784]: I1205 12:29:15.342381 4784 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31ce912b-e8c6-4b90-b8ed-d2051dce1232-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:15 crc kubenswrapper[4784]: I1205 12:29:15.559669 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x9c8w" event={"ID":"2f4fce3b-5270-43f8-9a95-b9c15beb8bd1","Type":"ContainerDied","Data":"94b81b48ad705d38763709d5a8825039e3df01417a0c8e53a145fbd24eab992c"} Dec 05 12:29:15 crc kubenswrapper[4784]: I1205 12:29:15.559729 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-x9c8w" Dec 05 12:29:15 crc kubenswrapper[4784]: I1205 12:29:15.559747 4784 scope.go:117] "RemoveContainer" containerID="17e8b8cd92943fe966f43f8b257505397919f739ea316acec8ee2c14e6da03f3" Dec 05 12:29:15 crc kubenswrapper[4784]: I1205 12:29:15.566012 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5gkgh" event={"ID":"31ce912b-e8c6-4b90-b8ed-d2051dce1232","Type":"ContainerDied","Data":"25c60e0ea5715d50fcdc1914d1aecf9d90c3b620537330e4b2cd935d614468b4"} Dec 05 12:29:15 crc kubenswrapper[4784]: I1205 12:29:15.566286 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5gkgh" Dec 05 12:29:15 crc kubenswrapper[4784]: I1205 12:29:15.588383 4784 scope.go:117] "RemoveContainer" containerID="79cfb6e2ef67c1e82b5d7e84f865fd0662dbdd326a11f610bb9b4e274409c966" Dec 05 12:29:15 crc kubenswrapper[4784]: I1205 12:29:15.591980 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-x9c8w"] Dec 05 12:29:15 crc kubenswrapper[4784]: I1205 12:29:15.601039 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-x9c8w"] Dec 05 12:29:15 crc kubenswrapper[4784]: I1205 12:29:15.607277 4784 scope.go:117] "RemoveContainer" containerID="f8056b1959f5eefb30415033ed3d8ace5dcc7209d7e3d1344e0b2cba921830ee" Dec 05 12:29:15 crc kubenswrapper[4784]: I1205 12:29:15.614208 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5gkgh"] Dec 05 12:29:15 crc kubenswrapper[4784]: I1205 12:29:15.622761 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-5gkgh"] Dec 05 12:29:15 crc kubenswrapper[4784]: I1205 12:29:15.623888 4784 scope.go:117] "RemoveContainer" containerID="bec9e4c30f49ac133c09ada34f5fab04643eae2ca6eec5ec0fc4115963f5848c" Dec 05 12:29:15 crc kubenswrapper[4784]: I1205 12:29:15.659242 4784 scope.go:117] "RemoveContainer" containerID="f34482c5d65a7fda67646fe0f7c2ce10f444dbd5c7b2eb24164a74d9df64883a" Dec 05 12:29:15 crc kubenswrapper[4784]: I1205 12:29:15.679417 4784 scope.go:117] "RemoveContainer" containerID="47d14e12ef59712ee4a3b75660d020bfa1dc6660fcbc3a528ed44086e5549508" Dec 05 12:29:17 crc kubenswrapper[4784]: I1205 12:29:17.005541 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f4fce3b-5270-43f8-9a95-b9c15beb8bd1" path="/var/lib/kubelet/pods/2f4fce3b-5270-43f8-9a95-b9c15beb8bd1/volumes" Dec 05 12:29:17 crc kubenswrapper[4784]: I1205 12:29:17.007436 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31ce912b-e8c6-4b90-b8ed-d2051dce1232" path="/var/lib/kubelet/pods/31ce912b-e8c6-4b90-b8ed-d2051dce1232/volumes" Dec 05 12:29:18 crc kubenswrapper[4784]: I1205 12:29:18.238246 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-kld95"] Dec 05 12:29:21 crc kubenswrapper[4784]: I1205 12:29:21.918874 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-sf2pd"] Dec 05 12:29:21 crc kubenswrapper[4784]: I1205 12:29:21.919783 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-sf2pd" podUID="be2166c8-c56d-46bd-ac93-e6eeb11ecba6" containerName="registry-server" containerID="cri-o://f7a5cf2484f7c19b4ba414fc32937ef7a46ee64751934d249ab88c46b822e19d" gracePeriod=30 Dec 05 12:29:21 crc kubenswrapper[4784]: I1205 12:29:21.924041 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pd864"] Dec 05 12:29:21 crc kubenswrapper[4784]: I1205 12:29:21.924517 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-pd864" podUID="f6a64333-7a87-462a-996f-b3ce85e43c8f" containerName="registry-server" containerID="cri-o://b23c758ac5a209a9f82e397181b091cbaf0525daf74352a342877dd0ec26b1d5" gracePeriod=30 Dec 05 12:29:21 crc kubenswrapper[4784]: I1205 12:29:21.936576 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/marketplace-operator-79b997595-txh4x"] Dec 05 12:29:21 crc kubenswrapper[4784]: I1205 12:29:21.937259 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-txh4x" podUID="e350bf27-d60f-4f5f-9bc0-460e997fed0c" containerName="marketplace-operator" containerID="cri-o://864a6fa223f0bf8039b10ef73f169f800bc5f0c4b7da73faadd496f24b37e227" gracePeriod=30 Dec 05 12:29:21 crc kubenswrapper[4784]: I1205 12:29:21.940621 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-hkkm9"] Dec 05 12:29:21 crc kubenswrapper[4784]: I1205 12:29:21.940845 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-hkkm9" podUID="6d81f1d0-3e85-443a-a738-2e0d9302d327" containerName="registry-server" containerID="cri-o://2ef1331e31888aa89cecb3fb44bc46be65bf27fdc801d727e6596e3f734de92d" gracePeriod=30 Dec 05 12:29:21 crc kubenswrapper[4784]: I1205 12:29:21.948365 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vlvmb"] Dec 05 12:29:21 crc kubenswrapper[4784]: I1205 12:29:21.948630 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-vlvmb" podUID="626019ff-24ba-4b81-b6ad-ba7c7085fa55" containerName="registry-server" containerID="cri-o://14a7bac8fa2bc9b62f356d65c56b04451bf492b1a49ddbf84785cb8d725eff75" gracePeriod=30 Dec 05 12:29:21 crc kubenswrapper[4784]: I1205 12:29:21.953376 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-kvfdl"] Dec 05 12:29:21 crc kubenswrapper[4784]: E1205 12:29:21.953610 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8" containerName="registry-server" Dec 05 12:29:21 crc kubenswrapper[4784]: I1205 12:29:21.953625 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8" containerName="registry-server" Dec 05 12:29:21 crc kubenswrapper[4784]: E1205 12:29:21.953636 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f4fce3b-5270-43f8-9a95-b9c15beb8bd1" containerName="extract-utilities" Dec 05 12:29:21 crc kubenswrapper[4784]: I1205 12:29:21.953643 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f4fce3b-5270-43f8-9a95-b9c15beb8bd1" containerName="extract-utilities" Dec 05 12:29:21 crc kubenswrapper[4784]: E1205 12:29:21.953657 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8" containerName="extract-content" Dec 05 12:29:21 crc kubenswrapper[4784]: I1205 12:29:21.953663 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8" containerName="extract-content" Dec 05 12:29:21 crc kubenswrapper[4784]: E1205 12:29:21.953672 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31ce912b-e8c6-4b90-b8ed-d2051dce1232" containerName="extract-content" Dec 05 12:29:21 crc kubenswrapper[4784]: I1205 12:29:21.953678 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="31ce912b-e8c6-4b90-b8ed-d2051dce1232" containerName="extract-content" Dec 05 12:29:21 crc kubenswrapper[4784]: E1205 12:29:21.953688 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8" containerName="extract-utilities" Dec 05 12:29:21 crc 
Dec 05 12:29:21 crc kubenswrapper[4784]: E1205 12:29:21.953703 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31ce912b-e8c6-4b90-b8ed-d2051dce1232" containerName="extract-utilities"
Dec 05 12:29:21 crc kubenswrapper[4784]: I1205 12:29:21.953709 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="31ce912b-e8c6-4b90-b8ed-d2051dce1232" containerName="extract-utilities"
Dec 05 12:29:21 crc kubenswrapper[4784]: E1205 12:29:21.953719 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f4fce3b-5270-43f8-9a95-b9c15beb8bd1" containerName="extract-content"
Dec 05 12:29:21 crc kubenswrapper[4784]: I1205 12:29:21.953725 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f4fce3b-5270-43f8-9a95-b9c15beb8bd1" containerName="extract-content"
Dec 05 12:29:21 crc kubenswrapper[4784]: E1205 12:29:21.953734 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31ce912b-e8c6-4b90-b8ed-d2051dce1232" containerName="registry-server"
Dec 05 12:29:21 crc kubenswrapper[4784]: I1205 12:29:21.953740 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="31ce912b-e8c6-4b90-b8ed-d2051dce1232" containerName="registry-server"
Dec 05 12:29:21 crc kubenswrapper[4784]: E1205 12:29:21.953752 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f4fce3b-5270-43f8-9a95-b9c15beb8bd1" containerName="registry-server"
Dec 05 12:29:21 crc kubenswrapper[4784]: I1205 12:29:21.953758 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f4fce3b-5270-43f8-9a95-b9c15beb8bd1" containerName="registry-server"
Dec 05 12:29:21 crc kubenswrapper[4784]: I1205 12:29:21.953842 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="e73d649d-8c9a-48e9-b5e3-b0d5108a1cb8" containerName="registry-server"
Dec 05 12:29:21 crc kubenswrapper[4784]: I1205 12:29:21.953858 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f4fce3b-5270-43f8-9a95-b9c15beb8bd1" containerName="registry-server"
Dec 05 12:29:21 crc kubenswrapper[4784]: I1205 12:29:21.953867 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="31ce912b-e8c6-4b90-b8ed-d2051dce1232" containerName="registry-server"
Dec 05 12:29:21 crc kubenswrapper[4784]: I1205 12:29:21.954368 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-kvfdl"
Dec 05 12:29:21 crc kubenswrapper[4784]: I1205 12:29:21.967875 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-kvfdl"]
Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.035004 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75l74\" (UniqueName: \"kubernetes.io/projected/4825dcfb-cf17-4a0d-b4f2-4f46c87beccb-kube-api-access-75l74\") pod \"marketplace-operator-79b997595-kvfdl\" (UID: \"4825dcfb-cf17-4a0d-b4f2-4f46c87beccb\") " pod="openshift-marketplace/marketplace-operator-79b997595-kvfdl"
Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.036477 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/4825dcfb-cf17-4a0d-b4f2-4f46c87beccb-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-kvfdl\" (UID: \"4825dcfb-cf17-4a0d-b4f2-4f46c87beccb\") " pod="openshift-marketplace/marketplace-operator-79b997595-kvfdl"
Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.036520 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4825dcfb-cf17-4a0d-b4f2-4f46c87beccb-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-kvfdl\" (UID: \"4825dcfb-cf17-4a0d-b4f2-4f46c87beccb\") " pod="openshift-marketplace/marketplace-operator-79b997595-kvfdl"
Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.137930 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/4825dcfb-cf17-4a0d-b4f2-4f46c87beccb-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-kvfdl\" (UID: \"4825dcfb-cf17-4a0d-b4f2-4f46c87beccb\") " pod="openshift-marketplace/marketplace-operator-79b997595-kvfdl"
Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.137980 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4825dcfb-cf17-4a0d-b4f2-4f46c87beccb-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-kvfdl\" (UID: \"4825dcfb-cf17-4a0d-b4f2-4f46c87beccb\") " pod="openshift-marketplace/marketplace-operator-79b997595-kvfdl"
Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.138011 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75l74\" (UniqueName: \"kubernetes.io/projected/4825dcfb-cf17-4a0d-b4f2-4f46c87beccb-kube-api-access-75l74\") pod \"marketplace-operator-79b997595-kvfdl\" (UID: \"4825dcfb-cf17-4a0d-b4f2-4f46c87beccb\") " pod="openshift-marketplace/marketplace-operator-79b997595-kvfdl"
Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.139688 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4825dcfb-cf17-4a0d-b4f2-4f46c87beccb-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-kvfdl\" (UID: \"4825dcfb-cf17-4a0d-b4f2-4f46c87beccb\") " pod="openshift-marketplace/marketplace-operator-79b997595-kvfdl"
Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.147936 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/4825dcfb-cf17-4a0d-b4f2-4f46c87beccb-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-kvfdl\" (UID: \"4825dcfb-cf17-4a0d-b4f2-4f46c87beccb\") " pod="openshift-marketplace/marketplace-operator-79b997595-kvfdl"
\"kubernetes.io/secret/4825dcfb-cf17-4a0d-b4f2-4f46c87beccb-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-kvfdl\" (UID: \"4825dcfb-cf17-4a0d-b4f2-4f46c87beccb\") " pod="openshift-marketplace/marketplace-operator-79b997595-kvfdl" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.165959 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-75l74\" (UniqueName: \"kubernetes.io/projected/4825dcfb-cf17-4a0d-b4f2-4f46c87beccb-kube-api-access-75l74\") pod \"marketplace-operator-79b997595-kvfdl\" (UID: \"4825dcfb-cf17-4a0d-b4f2-4f46c87beccb\") " pod="openshift-marketplace/marketplace-operator-79b997595-kvfdl" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.277496 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-kvfdl" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.333078 4784 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.333849 4784 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.333970 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.334659 4784 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 12:29:22 crc kubenswrapper[4784]: E1205 12:29:22.335274 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.335297 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 05 12:29:22 crc kubenswrapper[4784]: E1205 12:29:22.335312 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.335322 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 05 12:29:22 crc kubenswrapper[4784]: E1205 12:29:22.335347 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.335357 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 05 12:29:22 crc kubenswrapper[4784]: E1205 12:29:22.335371 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.335378 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 12:29:22 crc kubenswrapper[4784]: E1205 12:29:22.335472 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.335482 
4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 12:29:22 crc kubenswrapper[4784]: E1205 12:29:22.335492 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.335500 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 05 12:29:22 crc kubenswrapper[4784]: E1205 12:29:22.335514 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.335534 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.335658 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.335674 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.335688 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.335698 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.335708 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.335718 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.339732 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7" gracePeriod=15 Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.339932 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89" gracePeriod=15 Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.339934 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13" gracePeriod=15 Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.340034 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" 
podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e" gracePeriod=15 Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.340042 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505" gracePeriod=15 Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.442756 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.442812 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.442858 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.442906 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.442929 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.442953 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.442993 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.443019 4784 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.499446 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pd864" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.501413 4784 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.502035 4784 status_manager.go:851] "Failed to get status for pod" podUID="f6a64333-7a87-462a-996f-b3ce85e43c8f" pod="openshift-marketplace/community-operators-pd864" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-pd864\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:22 crc kubenswrapper[4784]: E1205 12:29:22.513358 4784 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.223:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.545822 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7f4gv\" (UniqueName: \"kubernetes.io/projected/f6a64333-7a87-462a-996f-b3ce85e43c8f-kube-api-access-7f4gv\") pod \"f6a64333-7a87-462a-996f-b3ce85e43c8f\" (UID: \"f6a64333-7a87-462a-996f-b3ce85e43c8f\") " Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.545979 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f6a64333-7a87-462a-996f-b3ce85e43c8f-catalog-content\") pod \"f6a64333-7a87-462a-996f-b3ce85e43c8f\" (UID: \"f6a64333-7a87-462a-996f-b3ce85e43c8f\") " Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.546984 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f6a64333-7a87-462a-996f-b3ce85e43c8f-utilities\") pod \"f6a64333-7a87-462a-996f-b3ce85e43c8f\" (UID: \"f6a64333-7a87-462a-996f-b3ce85e43c8f\") " Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.547654 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.547670 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f6a64333-7a87-462a-996f-b3ce85e43c8f-utilities" (OuterVolumeSpecName: "utilities") pod "f6a64333-7a87-462a-996f-b3ce85e43c8f" (UID: "f6a64333-7a87-462a-996f-b3ce85e43c8f"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.547812 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.547842 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.547881 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.547934 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.547975 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.548035 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.548061 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.548092 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.548125 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " 
pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.548141 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.548147 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.548172 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.548230 4784 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f6a64333-7a87-462a-996f-b3ce85e43c8f-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.548254 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.548271 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.548286 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.554422 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f6a64333-7a87-462a-996f-b3ce85e43c8f-kube-api-access-7f4gv" (OuterVolumeSpecName: "kube-api-access-7f4gv") pod "f6a64333-7a87-462a-996f-b3ce85e43c8f" (UID: "f6a64333-7a87-462a-996f-b3ce85e43c8f"). InnerVolumeSpecName "kube-api-access-7f4gv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.601582 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-sf2pd" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.602169 4784 status_manager.go:851] "Failed to get status for pod" podUID="be2166c8-c56d-46bd-ac93-e6eeb11ecba6" pod="openshift-marketplace/certified-operators-sf2pd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-sf2pd\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.602878 4784 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.603096 4784 status_manager.go:851] "Failed to get status for pod" podUID="f6a64333-7a87-462a-996f-b3ce85e43c8f" pod="openshift-marketplace/community-operators-pd864" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-pd864\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.610226 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vlvmb" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.610801 4784 status_manager.go:851] "Failed to get status for pod" podUID="f6a64333-7a87-462a-996f-b3ce85e43c8f" pod="openshift-marketplace/community-operators-pd864" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-pd864\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.611155 4784 status_manager.go:851] "Failed to get status for pod" podUID="626019ff-24ba-4b81-b6ad-ba7c7085fa55" pod="openshift-marketplace/redhat-operators-vlvmb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-vlvmb\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.611394 4784 status_manager.go:851] "Failed to get status for pod" podUID="be2166c8-c56d-46bd-ac93-e6eeb11ecba6" pod="openshift-marketplace/certified-operators-sf2pd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-sf2pd\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.611548 4784 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.615964 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-txh4x" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.616604 4784 generic.go:334] "Generic (PLEG): container finished" podID="6d81f1d0-3e85-443a-a738-2e0d9302d327" containerID="2ef1331e31888aa89cecb3fb44bc46be65bf27fdc801d727e6596e3f734de92d" exitCode=0 Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.616707 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hkkm9" event={"ID":"6d81f1d0-3e85-443a-a738-2e0d9302d327","Type":"ContainerDied","Data":"2ef1331e31888aa89cecb3fb44bc46be65bf27fdc801d727e6596e3f734de92d"} Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.616774 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hkkm9" event={"ID":"6d81f1d0-3e85-443a-a738-2e0d9302d327","Type":"ContainerDied","Data":"439a9d55b10105aabca347dbf4ebd885e6520d633ba65f17d0c6958c1d4bc84d"} Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.616791 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="439a9d55b10105aabca347dbf4ebd885e6520d633ba65f17d0c6958c1d4bc84d" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.618743 4784 generic.go:334] "Generic (PLEG): container finished" podID="390fc8b7-86b2-4ecc-a41f-1fffd11b1a22" containerID="ecbf27aef13c40bf29d3a984c126a1ebb575be7acd546abc0ea4a42e55a21d9b" exitCode=0 Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.618814 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"390fc8b7-86b2-4ecc-a41f-1fffd11b1a22","Type":"ContainerDied","Data":"ecbf27aef13c40bf29d3a984c126a1ebb575be7acd546abc0ea4a42e55a21d9b"} Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.619670 4784 status_manager.go:851] "Failed to get status for pod" podUID="e350bf27-d60f-4f5f-9bc0-460e997fed0c" pod="openshift-marketplace/marketplace-operator-79b997595-txh4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-txh4x\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.621165 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hkkm9" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.622054 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.621409 4784 status_manager.go:851] "Failed to get status for pod" podUID="626019ff-24ba-4b81-b6ad-ba7c7085fa55" pod="openshift-marketplace/redhat-operators-vlvmb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-vlvmb\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.622648 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f6a64333-7a87-462a-996f-b3ce85e43c8f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f6a64333-7a87-462a-996f-b3ce85e43c8f" (UID: "f6a64333-7a87-462a-996f-b3ce85e43c8f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.623241 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.623654 4784 status_manager.go:851] "Failed to get status for pod" podUID="be2166c8-c56d-46bd-ac93-e6eeb11ecba6" pod="openshift-marketplace/certified-operators-sf2pd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-sf2pd\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.624349 4784 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.624674 4784 status_manager.go:851] "Failed to get status for pod" podUID="f6a64333-7a87-462a-996f-b3ce85e43c8f" pod="openshift-marketplace/community-operators-pd864" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-pd864\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.624902 4784 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e" exitCode=0 Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.624946 4784 scope.go:117] "RemoveContainer" containerID="f4d426cbf267bb8fb55c63b91ec9cbb1958fed967cf58b53e75a250d96857c3f" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.624926 4784 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505" exitCode=0 Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.624964 4784 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13" exitCode=0 Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.624971 4784 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89" exitCode=2 Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.625558 4784 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.625820 4784 status_manager.go:851] "Failed to get status for pod" podUID="f6a64333-7a87-462a-996f-b3ce85e43c8f" pod="openshift-marketplace/community-operators-pd864" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-pd864\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.626120 4784 status_manager.go:851] "Failed to get 
status for pod" podUID="390fc8b7-86b2-4ecc-a41f-1fffd11b1a22" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.626435 4784 status_manager.go:851] "Failed to get status for pod" podUID="e350bf27-d60f-4f5f-9bc0-460e997fed0c" pod="openshift-marketplace/marketplace-operator-79b997595-txh4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-txh4x\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.626692 4784 status_manager.go:851] "Failed to get status for pod" podUID="626019ff-24ba-4b81-b6ad-ba7c7085fa55" pod="openshift-marketplace/redhat-operators-vlvmb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-vlvmb\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.627049 4784 status_manager.go:851] "Failed to get status for pod" podUID="6d81f1d0-3e85-443a-a738-2e0d9302d327" pod="openshift-marketplace/redhat-marketplace-hkkm9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-hkkm9\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.627247 4784 status_manager.go:851] "Failed to get status for pod" podUID="be2166c8-c56d-46bd-ac93-e6eeb11ecba6" pod="openshift-marketplace/certified-operators-sf2pd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-sf2pd\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.628002 4784 generic.go:334] "Generic (PLEG): container finished" podID="626019ff-24ba-4b81-b6ad-ba7c7085fa55" containerID="14a7bac8fa2bc9b62f356d65c56b04451bf492b1a49ddbf84785cb8d725eff75" exitCode=0 Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.628051 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vlvmb" event={"ID":"626019ff-24ba-4b81-b6ad-ba7c7085fa55","Type":"ContainerDied","Data":"14a7bac8fa2bc9b62f356d65c56b04451bf492b1a49ddbf84785cb8d725eff75"} Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.628056 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vlvmb" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.628070 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vlvmb" event={"ID":"626019ff-24ba-4b81-b6ad-ba7c7085fa55","Type":"ContainerDied","Data":"9c8fbadb000f04d87d83c5c109b197f6cfe898ed22830c7225cff0880da829a6"} Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.630410 4784 generic.go:334] "Generic (PLEG): container finished" podID="f6a64333-7a87-462a-996f-b3ce85e43c8f" containerID="b23c758ac5a209a9f82e397181b091cbaf0525daf74352a342877dd0ec26b1d5" exitCode=0 Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.630452 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-pd864" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.630483 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pd864" event={"ID":"f6a64333-7a87-462a-996f-b3ce85e43c8f","Type":"ContainerDied","Data":"b23c758ac5a209a9f82e397181b091cbaf0525daf74352a342877dd0ec26b1d5"} Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.630800 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pd864" event={"ID":"f6a64333-7a87-462a-996f-b3ce85e43c8f","Type":"ContainerDied","Data":"13e30e863f50f2833454151cc62d9d7b67fb7746ab8743228497f73d09ac8e07"} Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.630121 4784 status_manager.go:851] "Failed to get status for pod" podUID="e350bf27-d60f-4f5f-9bc0-460e997fed0c" pod="openshift-marketplace/marketplace-operator-79b997595-txh4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-txh4x\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.633948 4784 generic.go:334] "Generic (PLEG): container finished" podID="e350bf27-d60f-4f5f-9bc0-460e997fed0c" containerID="864a6fa223f0bf8039b10ef73f169f800bc5f0c4b7da73faadd496f24b37e227" exitCode=0 Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.633988 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-txh4x" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.634041 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-txh4x" event={"ID":"e350bf27-d60f-4f5f-9bc0-460e997fed0c","Type":"ContainerDied","Data":"864a6fa223f0bf8039b10ef73f169f800bc5f0c4b7da73faadd496f24b37e227"} Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.634066 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-txh4x" event={"ID":"e350bf27-d60f-4f5f-9bc0-460e997fed0c","Type":"ContainerDied","Data":"c365ccb450f0939c2096353163f9f8a9b5d5407e9d58d9a8840ad27fd3fb870e"} Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.637506 4784 status_manager.go:851] "Failed to get status for pod" podUID="626019ff-24ba-4b81-b6ad-ba7c7085fa55" pod="openshift-marketplace/redhat-operators-vlvmb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-vlvmb\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.637844 4784 status_manager.go:851] "Failed to get status for pod" podUID="6d81f1d0-3e85-443a-a738-2e0d9302d327" pod="openshift-marketplace/redhat-marketplace-hkkm9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-hkkm9\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.638104 4784 generic.go:334] "Generic (PLEG): container finished" podID="be2166c8-c56d-46bd-ac93-e6eeb11ecba6" containerID="f7a5cf2484f7c19b4ba414fc32937ef7a46ee64751934d249ab88c46b822e19d" exitCode=0 Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.638150 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sf2pd" 
event={"ID":"be2166c8-c56d-46bd-ac93-e6eeb11ecba6","Type":"ContainerDied","Data":"f7a5cf2484f7c19b4ba414fc32937ef7a46ee64751934d249ab88c46b822e19d"} Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.638177 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sf2pd" event={"ID":"be2166c8-c56d-46bd-ac93-e6eeb11ecba6","Type":"ContainerDied","Data":"29bd7aaacf9624a49d7f199b85e377c52bff4918f7d9f7818a8991a0b014fad9"} Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.638171 4784 status_manager.go:851] "Failed to get status for pod" podUID="be2166c8-c56d-46bd-ac93-e6eeb11ecba6" pod="openshift-marketplace/certified-operators-sf2pd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-sf2pd\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.638263 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-sf2pd" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.638439 4784 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.638671 4784 status_manager.go:851] "Failed to get status for pod" podUID="f6a64333-7a87-462a-996f-b3ce85e43c8f" pod="openshift-marketplace/community-operators-pd864" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-pd864\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.638964 4784 status_manager.go:851] "Failed to get status for pod" podUID="390fc8b7-86b2-4ecc-a41f-1fffd11b1a22" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.639842 4784 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.640059 4784 status_manager.go:851] "Failed to get status for pod" podUID="f6a64333-7a87-462a-996f-b3ce85e43c8f" pod="openshift-marketplace/community-operators-pd864" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-pd864\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.640275 4784 status_manager.go:851] "Failed to get status for pod" podUID="390fc8b7-86b2-4ecc-a41f-1fffd11b1a22" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.640461 4784 status_manager.go:851] "Failed to get status for pod" 
podUID="626019ff-24ba-4b81-b6ad-ba7c7085fa55" pod="openshift-marketplace/redhat-operators-vlvmb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-vlvmb\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.640677 4784 status_manager.go:851] "Failed to get status for pod" podUID="e350bf27-d60f-4f5f-9bc0-460e997fed0c" pod="openshift-marketplace/marketplace-operator-79b997595-txh4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-txh4x\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.640845 4784 status_manager.go:851] "Failed to get status for pod" podUID="6d81f1d0-3e85-443a-a738-2e0d9302d327" pod="openshift-marketplace/redhat-marketplace-hkkm9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-hkkm9\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.641610 4784 status_manager.go:851] "Failed to get status for pod" podUID="be2166c8-c56d-46bd-ac93-e6eeb11ecba6" pod="openshift-marketplace/certified-operators-sf2pd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-sf2pd\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.645022 4784 status_manager.go:851] "Failed to get status for pod" podUID="6d81f1d0-3e85-443a-a738-2e0d9302d327" pod="openshift-marketplace/redhat-marketplace-hkkm9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-hkkm9\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.645427 4784 status_manager.go:851] "Failed to get status for pod" podUID="be2166c8-c56d-46bd-ac93-e6eeb11ecba6" pod="openshift-marketplace/certified-operators-sf2pd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-sf2pd\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.645787 4784 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.646046 4784 status_manager.go:851] "Failed to get status for pod" podUID="f6a64333-7a87-462a-996f-b3ce85e43c8f" pod="openshift-marketplace/community-operators-pd864" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-pd864\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.646393 4784 status_manager.go:851] "Failed to get status for pod" podUID="390fc8b7-86b2-4ecc-a41f-1fffd11b1a22" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.646670 4784 status_manager.go:851] "Failed to get 
status for pod" podUID="626019ff-24ba-4b81-b6ad-ba7c7085fa55" pod="openshift-marketplace/redhat-operators-vlvmb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-vlvmb\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.646891 4784 status_manager.go:851] "Failed to get status for pod" podUID="e350bf27-d60f-4f5f-9bc0-460e997fed0c" pod="openshift-marketplace/marketplace-operator-79b997595-txh4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-txh4x\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.649237 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/e350bf27-d60f-4f5f-9bc0-460e997fed0c-marketplace-operator-metrics\") pod \"e350bf27-d60f-4f5f-9bc0-460e997fed0c\" (UID: \"e350bf27-d60f-4f5f-9bc0-460e997fed0c\") " Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.649311 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/be2166c8-c56d-46bd-ac93-e6eeb11ecba6-catalog-content\") pod \"be2166c8-c56d-46bd-ac93-e6eeb11ecba6\" (UID: \"be2166c8-c56d-46bd-ac93-e6eeb11ecba6\") " Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.649330 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/be2166c8-c56d-46bd-ac93-e6eeb11ecba6-utilities\") pod \"be2166c8-c56d-46bd-ac93-e6eeb11ecba6\" (UID: \"be2166c8-c56d-46bd-ac93-e6eeb11ecba6\") " Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.649382 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4xbq9\" (UniqueName: \"kubernetes.io/projected/626019ff-24ba-4b81-b6ad-ba7c7085fa55-kube-api-access-4xbq9\") pod \"626019ff-24ba-4b81-b6ad-ba7c7085fa55\" (UID: \"626019ff-24ba-4b81-b6ad-ba7c7085fa55\") " Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.649409 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e350bf27-d60f-4f5f-9bc0-460e997fed0c-marketplace-trusted-ca\") pod \"e350bf27-d60f-4f5f-9bc0-460e997fed0c\" (UID: \"e350bf27-d60f-4f5f-9bc0-460e997fed0c\") " Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.649430 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/626019ff-24ba-4b81-b6ad-ba7c7085fa55-utilities\") pod \"626019ff-24ba-4b81-b6ad-ba7c7085fa55\" (UID: \"626019ff-24ba-4b81-b6ad-ba7c7085fa55\") " Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.649449 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v8bzh\" (UniqueName: \"kubernetes.io/projected/e350bf27-d60f-4f5f-9bc0-460e997fed0c-kube-api-access-v8bzh\") pod \"e350bf27-d60f-4f5f-9bc0-460e997fed0c\" (UID: \"e350bf27-d60f-4f5f-9bc0-460e997fed0c\") " Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.649480 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/626019ff-24ba-4b81-b6ad-ba7c7085fa55-catalog-content\") pod \"626019ff-24ba-4b81-b6ad-ba7c7085fa55\" (UID: 
\"626019ff-24ba-4b81-b6ad-ba7c7085fa55\") " Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.649527 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2lxnn\" (UniqueName: \"kubernetes.io/projected/be2166c8-c56d-46bd-ac93-e6eeb11ecba6-kube-api-access-2lxnn\") pod \"be2166c8-c56d-46bd-ac93-e6eeb11ecba6\" (UID: \"be2166c8-c56d-46bd-ac93-e6eeb11ecba6\") " Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.649723 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7f4gv\" (UniqueName: \"kubernetes.io/projected/f6a64333-7a87-462a-996f-b3ce85e43c8f-kube-api-access-7f4gv\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.649739 4784 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f6a64333-7a87-462a-996f-b3ce85e43c8f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.650576 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e350bf27-d60f-4f5f-9bc0-460e997fed0c-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "e350bf27-d60f-4f5f-9bc0-460e997fed0c" (UID: "e350bf27-d60f-4f5f-9bc0-460e997fed0c"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.650621 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/be2166c8-c56d-46bd-ac93-e6eeb11ecba6-utilities" (OuterVolumeSpecName: "utilities") pod "be2166c8-c56d-46bd-ac93-e6eeb11ecba6" (UID: "be2166c8-c56d-46bd-ac93-e6eeb11ecba6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.651394 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/626019ff-24ba-4b81-b6ad-ba7c7085fa55-utilities" (OuterVolumeSpecName: "utilities") pod "626019ff-24ba-4b81-b6ad-ba7c7085fa55" (UID: "626019ff-24ba-4b81-b6ad-ba7c7085fa55"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.652177 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be2166c8-c56d-46bd-ac93-e6eeb11ecba6-kube-api-access-2lxnn" (OuterVolumeSpecName: "kube-api-access-2lxnn") pod "be2166c8-c56d-46bd-ac93-e6eeb11ecba6" (UID: "be2166c8-c56d-46bd-ac93-e6eeb11ecba6"). InnerVolumeSpecName "kube-api-access-2lxnn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.654049 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/626019ff-24ba-4b81-b6ad-ba7c7085fa55-kube-api-access-4xbq9" (OuterVolumeSpecName: "kube-api-access-4xbq9") pod "626019ff-24ba-4b81-b6ad-ba7c7085fa55" (UID: "626019ff-24ba-4b81-b6ad-ba7c7085fa55"). InnerVolumeSpecName "kube-api-access-4xbq9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.655880 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e350bf27-d60f-4f5f-9bc0-460e997fed0c-kube-api-access-v8bzh" (OuterVolumeSpecName: "kube-api-access-v8bzh") pod "e350bf27-d60f-4f5f-9bc0-460e997fed0c" (UID: "e350bf27-d60f-4f5f-9bc0-460e997fed0c"). InnerVolumeSpecName "kube-api-access-v8bzh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.657505 4784 scope.go:117] "RemoveContainer" containerID="14a7bac8fa2bc9b62f356d65c56b04451bf492b1a49ddbf84785cb8d725eff75" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.657516 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e350bf27-d60f-4f5f-9bc0-460e997fed0c-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "e350bf27-d60f-4f5f-9bc0-460e997fed0c" (UID: "e350bf27-d60f-4f5f-9bc0-460e997fed0c"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.675528 4784 scope.go:117] "RemoveContainer" containerID="d35997b0b696b14377b9e345a901049390a07a8b53b0a05206a91841e23c458b" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.717696 4784 scope.go:117] "RemoveContainer" containerID="580130158a4a16e8670ad4ee16d3ee4a1b48d98dc77d03681caea62f6f797751" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.726963 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/be2166c8-c56d-46bd-ac93-e6eeb11ecba6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "be2166c8-c56d-46bd-ac93-e6eeb11ecba6" (UID: "be2166c8-c56d-46bd-ac93-e6eeb11ecba6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.749197 4784 scope.go:117] "RemoveContainer" containerID="14a7bac8fa2bc9b62f356d65c56b04451bf492b1a49ddbf84785cb8d725eff75" Dec 05 12:29:22 crc kubenswrapper[4784]: E1205 12:29:22.750146 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"14a7bac8fa2bc9b62f356d65c56b04451bf492b1a49ddbf84785cb8d725eff75\": container with ID starting with 14a7bac8fa2bc9b62f356d65c56b04451bf492b1a49ddbf84785cb8d725eff75 not found: ID does not exist" containerID="14a7bac8fa2bc9b62f356d65c56b04451bf492b1a49ddbf84785cb8d725eff75" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.750176 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"14a7bac8fa2bc9b62f356d65c56b04451bf492b1a49ddbf84785cb8d725eff75"} err="failed to get container status \"14a7bac8fa2bc9b62f356d65c56b04451bf492b1a49ddbf84785cb8d725eff75\": rpc error: code = NotFound desc = could not find container \"14a7bac8fa2bc9b62f356d65c56b04451bf492b1a49ddbf84785cb8d725eff75\": container with ID starting with 14a7bac8fa2bc9b62f356d65c56b04451bf492b1a49ddbf84785cb8d725eff75 not found: ID does not exist" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.750211 4784 scope.go:117] "RemoveContainer" containerID="d35997b0b696b14377b9e345a901049390a07a8b53b0a05206a91841e23c458b" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.750473 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pxshj\" (UniqueName: \"kubernetes.io/projected/6d81f1d0-3e85-443a-a738-2e0d9302d327-kube-api-access-pxshj\") pod \"6d81f1d0-3e85-443a-a738-2e0d9302d327\" (UID: \"6d81f1d0-3e85-443a-a738-2e0d9302d327\") " Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.750528 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6d81f1d0-3e85-443a-a738-2e0d9302d327-catalog-content\") pod \"6d81f1d0-3e85-443a-a738-2e0d9302d327\" (UID: \"6d81f1d0-3e85-443a-a738-2e0d9302d327\") " Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.750595 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6d81f1d0-3e85-443a-a738-2e0d9302d327-utilities\") pod \"6d81f1d0-3e85-443a-a738-2e0d9302d327\" (UID: \"6d81f1d0-3e85-443a-a738-2e0d9302d327\") " Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.750816 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2lxnn\" (UniqueName: \"kubernetes.io/projected/be2166c8-c56d-46bd-ac93-e6eeb11ecba6-kube-api-access-2lxnn\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.750832 4784 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/e350bf27-d60f-4f5f-9bc0-460e997fed0c-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.750841 4784 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/be2166c8-c56d-46bd-ac93-e6eeb11ecba6-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.750852 4784 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/be2166c8-c56d-46bd-ac93-e6eeb11ecba6-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.750860 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4xbq9\" (UniqueName: \"kubernetes.io/projected/626019ff-24ba-4b81-b6ad-ba7c7085fa55-kube-api-access-4xbq9\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.750868 4784 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e350bf27-d60f-4f5f-9bc0-460e997fed0c-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.750878 4784 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/626019ff-24ba-4b81-b6ad-ba7c7085fa55-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.750886 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v8bzh\" (UniqueName: \"kubernetes.io/projected/e350bf27-d60f-4f5f-9bc0-460e997fed0c-kube-api-access-v8bzh\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.751570 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6d81f1d0-3e85-443a-a738-2e0d9302d327-utilities" (OuterVolumeSpecName: "utilities") pod "6d81f1d0-3e85-443a-a738-2e0d9302d327" (UID: "6d81f1d0-3e85-443a-a738-2e0d9302d327"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:29:22 crc kubenswrapper[4784]: E1205 12:29:22.751658 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d35997b0b696b14377b9e345a901049390a07a8b53b0a05206a91841e23c458b\": container with ID starting with d35997b0b696b14377b9e345a901049390a07a8b53b0a05206a91841e23c458b not found: ID does not exist" containerID="d35997b0b696b14377b9e345a901049390a07a8b53b0a05206a91841e23c458b" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.751684 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d35997b0b696b14377b9e345a901049390a07a8b53b0a05206a91841e23c458b"} err="failed to get container status \"d35997b0b696b14377b9e345a901049390a07a8b53b0a05206a91841e23c458b\": rpc error: code = NotFound desc = could not find container \"d35997b0b696b14377b9e345a901049390a07a8b53b0a05206a91841e23c458b\": container with ID starting with d35997b0b696b14377b9e345a901049390a07a8b53b0a05206a91841e23c458b not found: ID does not exist" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.751710 4784 scope.go:117] "RemoveContainer" containerID="580130158a4a16e8670ad4ee16d3ee4a1b48d98dc77d03681caea62f6f797751" Dec 05 12:29:22 crc kubenswrapper[4784]: E1205 12:29:22.751984 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"580130158a4a16e8670ad4ee16d3ee4a1b48d98dc77d03681caea62f6f797751\": container with ID starting with 580130158a4a16e8670ad4ee16d3ee4a1b48d98dc77d03681caea62f6f797751 not found: ID does not exist" containerID="580130158a4a16e8670ad4ee16d3ee4a1b48d98dc77d03681caea62f6f797751" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.752007 4784 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"580130158a4a16e8670ad4ee16d3ee4a1b48d98dc77d03681caea62f6f797751"} err="failed to get container status \"580130158a4a16e8670ad4ee16d3ee4a1b48d98dc77d03681caea62f6f797751\": rpc error: code = NotFound desc = could not find container \"580130158a4a16e8670ad4ee16d3ee4a1b48d98dc77d03681caea62f6f797751\": container with ID starting with 580130158a4a16e8670ad4ee16d3ee4a1b48d98dc77d03681caea62f6f797751 not found: ID does not exist" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.752020 4784 scope.go:117] "RemoveContainer" containerID="b23c758ac5a209a9f82e397181b091cbaf0525daf74352a342877dd0ec26b1d5" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.753942 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d81f1d0-3e85-443a-a738-2e0d9302d327-kube-api-access-pxshj" (OuterVolumeSpecName: "kube-api-access-pxshj") pod "6d81f1d0-3e85-443a-a738-2e0d9302d327" (UID: "6d81f1d0-3e85-443a-a738-2e0d9302d327"). InnerVolumeSpecName "kube-api-access-pxshj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.768344 4784 scope.go:117] "RemoveContainer" containerID="e8ef2134729240e22227a1a94d0a73f7766fe12caeb9ef2e68b414b56413799e" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.771249 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6d81f1d0-3e85-443a-a738-2e0d9302d327-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6d81f1d0-3e85-443a-a738-2e0d9302d327" (UID: "6d81f1d0-3e85-443a-a738-2e0d9302d327"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.784107 4784 scope.go:117] "RemoveContainer" containerID="a1ca080e06c90d50029e3e053bb842439e8a65280503413377d5f8cf766f9fa6" Dec 05 12:29:22 crc kubenswrapper[4784]: E1205 12:29:22.796340 4784 log.go:32] "RunPodSandbox from runtime service failed" err=< Dec 05 12:29:22 crc kubenswrapper[4784]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-kvfdl_openshift-marketplace_4825dcfb-cf17-4a0d-b4f2-4f46c87beccb_0(a7b212dc0d75a287b961f7a0ff821d5ea7980a09993505a9d4a341279dc7dc10): error adding pod openshift-marketplace_marketplace-operator-79b997595-kvfdl to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"a7b212dc0d75a287b961f7a0ff821d5ea7980a09993505a9d4a341279dc7dc10" Netns:"/var/run/netns/4ffd8df7-96ec-40e6-97cb-cc796a382b28" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-kvfdl;K8S_POD_INFRA_CONTAINER_ID=a7b212dc0d75a287b961f7a0ff821d5ea7980a09993505a9d4a341279dc7dc10;K8S_POD_UID=4825dcfb-cf17-4a0d-b4f2-4f46c87beccb" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-kvfdl] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-kvfdl/4825dcfb-cf17-4a0d-b4f2-4f46c87beccb]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-kvfdl in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-kvfdl in out of cluster comm: status update failed for pod /: Get 
"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-kvfdl?timeout=1m0s": dial tcp 38.102.83.223:6443: connect: connection refused Dec 05 12:29:22 crc kubenswrapper[4784]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 05 12:29:22 crc kubenswrapper[4784]: > Dec 05 12:29:22 crc kubenswrapper[4784]: E1205 12:29:22.796403 4784 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err=< Dec 05 12:29:22 crc kubenswrapper[4784]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-kvfdl_openshift-marketplace_4825dcfb-cf17-4a0d-b4f2-4f46c87beccb_0(a7b212dc0d75a287b961f7a0ff821d5ea7980a09993505a9d4a341279dc7dc10): error adding pod openshift-marketplace_marketplace-operator-79b997595-kvfdl to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"a7b212dc0d75a287b961f7a0ff821d5ea7980a09993505a9d4a341279dc7dc10" Netns:"/var/run/netns/4ffd8df7-96ec-40e6-97cb-cc796a382b28" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-kvfdl;K8S_POD_INFRA_CONTAINER_ID=a7b212dc0d75a287b961f7a0ff821d5ea7980a09993505a9d4a341279dc7dc10;K8S_POD_UID=4825dcfb-cf17-4a0d-b4f2-4f46c87beccb" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-kvfdl] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-kvfdl/4825dcfb-cf17-4a0d-b4f2-4f46c87beccb]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-kvfdl in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-kvfdl in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-kvfdl?timeout=1m0s": dial tcp 38.102.83.223:6443: connect: connection refused Dec 05 12:29:22 crc kubenswrapper[4784]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 05 12:29:22 crc kubenswrapper[4784]: > pod="openshift-marketplace/marketplace-operator-79b997595-kvfdl" Dec 05 12:29:22 crc kubenswrapper[4784]: E1205 12:29:22.796428 4784 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err=< Dec 05 12:29:22 crc kubenswrapper[4784]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-kvfdl_openshift-marketplace_4825dcfb-cf17-4a0d-b4f2-4f46c87beccb_0(a7b212dc0d75a287b961f7a0ff821d5ea7980a09993505a9d4a341279dc7dc10): error adding pod openshift-marketplace_marketplace-operator-79b997595-kvfdl to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI 
request failed with status 400: 'ContainerID:"a7b212dc0d75a287b961f7a0ff821d5ea7980a09993505a9d4a341279dc7dc10" Netns:"/var/run/netns/4ffd8df7-96ec-40e6-97cb-cc796a382b28" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-kvfdl;K8S_POD_INFRA_CONTAINER_ID=a7b212dc0d75a287b961f7a0ff821d5ea7980a09993505a9d4a341279dc7dc10;K8S_POD_UID=4825dcfb-cf17-4a0d-b4f2-4f46c87beccb" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-kvfdl] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-kvfdl/4825dcfb-cf17-4a0d-b4f2-4f46c87beccb]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-kvfdl in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-kvfdl in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-kvfdl?timeout=1m0s": dial tcp 38.102.83.223:6443: connect: connection refused Dec 05 12:29:22 crc kubenswrapper[4784]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 05 12:29:22 crc kubenswrapper[4784]: > pod="openshift-marketplace/marketplace-operator-79b997595-kvfdl" Dec 05 12:29:22 crc kubenswrapper[4784]: E1205 12:29:22.796482 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"marketplace-operator-79b997595-kvfdl_openshift-marketplace(4825dcfb-cf17-4a0d-b4f2-4f46c87beccb)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"marketplace-operator-79b997595-kvfdl_openshift-marketplace(4825dcfb-cf17-4a0d-b4f2-4f46c87beccb)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-kvfdl_openshift-marketplace_4825dcfb-cf17-4a0d-b4f2-4f46c87beccb_0(a7b212dc0d75a287b961f7a0ff821d5ea7980a09993505a9d4a341279dc7dc10): error adding pod openshift-marketplace_marketplace-operator-79b997595-kvfdl to CNI network \\\"multus-cni-network\\\": plugin type=\\\"multus-shim\\\" name=\\\"multus-cni-network\\\" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:\\\"a7b212dc0d75a287b961f7a0ff821d5ea7980a09993505a9d4a341279dc7dc10\\\" Netns:\\\"/var/run/netns/4ffd8df7-96ec-40e6-97cb-cc796a382b28\\\" IfName:\\\"eth0\\\" Args:\\\"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-kvfdl;K8S_POD_INFRA_CONTAINER_ID=a7b212dc0d75a287b961f7a0ff821d5ea7980a09993505a9d4a341279dc7dc10;K8S_POD_UID=4825dcfb-cf17-4a0d-b4f2-4f46c87beccb\\\" Path:\\\"\\\" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-kvfdl] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-kvfdl/4825dcfb-cf17-4a0d-b4f2-4f46c87beccb]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-kvfdl in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-kvfdl in out of cluster comm: status update failed for pod /: Get 
\\\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-kvfdl?timeout=1m0s\\\": dial tcp 38.102.83.223:6443: connect: connection refused\\n': StdinData: {\\\"binDir\\\":\\\"/var/lib/cni/bin\\\",\\\"clusterNetwork\\\":\\\"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf\\\",\\\"cniVersion\\\":\\\"0.3.1\\\",\\\"daemonSocketDir\\\":\\\"/run/multus/socket\\\",\\\"globalNamespaces\\\":\\\"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv\\\",\\\"logLevel\\\":\\\"verbose\\\",\\\"logToStderr\\\":true,\\\"name\\\":\\\"multus-cni-network\\\",\\\"namespaceIsolation\\\":true,\\\"type\\\":\\\"multus-shim\\\"}\"" pod="openshift-marketplace/marketplace-operator-79b997595-kvfdl" podUID="4825dcfb-cf17-4a0d-b4f2-4f46c87beccb" Dec 05 12:29:22 crc kubenswrapper[4784]: E1205 12:29:22.796976 4784 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/events\": dial tcp 38.102.83.223:6443: connect: connection refused" event=< Dec 05 12:29:22 crc kubenswrapper[4784]: &Event{ObjectMeta:{marketplace-operator-79b997595-kvfdl.187e518b52279e9e openshift-marketplace 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-marketplace,Name:marketplace-operator-79b997595-kvfdl,UID:4825dcfb-cf17-4a0d-b4f2-4f46c87beccb,APIVersion:v1,ResourceVersion:29402,FieldPath:,},Reason:FailedCreatePodSandBox,Message:Failed to create pod sandbox: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-kvfdl_openshift-marketplace_4825dcfb-cf17-4a0d-b4f2-4f46c87beccb_0(a7b212dc0d75a287b961f7a0ff821d5ea7980a09993505a9d4a341279dc7dc10): error adding pod openshift-marketplace_marketplace-operator-79b997595-kvfdl to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"a7b212dc0d75a287b961f7a0ff821d5ea7980a09993505a9d4a341279dc7dc10" Netns:"/var/run/netns/4ffd8df7-96ec-40e6-97cb-cc796a382b28" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-kvfdl;K8S_POD_INFRA_CONTAINER_ID=a7b212dc0d75a287b961f7a0ff821d5ea7980a09993505a9d4a341279dc7dc10;K8S_POD_UID=4825dcfb-cf17-4a0d-b4f2-4f46c87beccb" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-kvfdl] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-kvfdl/4825dcfb-cf17-4a0d-b4f2-4f46c87beccb]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-kvfdl in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-kvfdl in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-kvfdl?timeout=1m0s": dial tcp 38.102.83.223:6443: connect: connection refused Dec 05 12:29:22 crc kubenswrapper[4784]: ': StdinData: 
{"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"},Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 12:29:22.796445342 +0000 UTC m=+242.216512157,LastTimestamp:2025-12-05 12:29:22.796445342 +0000 UTC m=+242.216512157,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,} Dec 05 12:29:22 crc kubenswrapper[4784]: > Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.807560 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/626019ff-24ba-4b81-b6ad-ba7c7085fa55-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "626019ff-24ba-4b81-b6ad-ba7c7085fa55" (UID: "626019ff-24ba-4b81-b6ad-ba7c7085fa55"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.812127 4784 scope.go:117] "RemoveContainer" containerID="b23c758ac5a209a9f82e397181b091cbaf0525daf74352a342877dd0ec26b1d5" Dec 05 12:29:22 crc kubenswrapper[4784]: E1205 12:29:22.812561 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b23c758ac5a209a9f82e397181b091cbaf0525daf74352a342877dd0ec26b1d5\": container with ID starting with b23c758ac5a209a9f82e397181b091cbaf0525daf74352a342877dd0ec26b1d5 not found: ID does not exist" containerID="b23c758ac5a209a9f82e397181b091cbaf0525daf74352a342877dd0ec26b1d5" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.812606 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b23c758ac5a209a9f82e397181b091cbaf0525daf74352a342877dd0ec26b1d5"} err="failed to get container status \"b23c758ac5a209a9f82e397181b091cbaf0525daf74352a342877dd0ec26b1d5\": rpc error: code = NotFound desc = could not find container \"b23c758ac5a209a9f82e397181b091cbaf0525daf74352a342877dd0ec26b1d5\": container with ID starting with b23c758ac5a209a9f82e397181b091cbaf0525daf74352a342877dd0ec26b1d5 not found: ID does not exist" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.812633 4784 scope.go:117] "RemoveContainer" containerID="e8ef2134729240e22227a1a94d0a73f7766fe12caeb9ef2e68b414b56413799e" Dec 05 12:29:22 crc kubenswrapper[4784]: E1205 12:29:22.813133 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e8ef2134729240e22227a1a94d0a73f7766fe12caeb9ef2e68b414b56413799e\": container with ID starting with e8ef2134729240e22227a1a94d0a73f7766fe12caeb9ef2e68b414b56413799e not found: ID does not exist" containerID="e8ef2134729240e22227a1a94d0a73f7766fe12caeb9ef2e68b414b56413799e" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.813174 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e8ef2134729240e22227a1a94d0a73f7766fe12caeb9ef2e68b414b56413799e"} err="failed to get container status \"e8ef2134729240e22227a1a94d0a73f7766fe12caeb9ef2e68b414b56413799e\": rpc error: code = NotFound desc = could not find container \"e8ef2134729240e22227a1a94d0a73f7766fe12caeb9ef2e68b414b56413799e\": container with ID 
starting with e8ef2134729240e22227a1a94d0a73f7766fe12caeb9ef2e68b414b56413799e not found: ID does not exist" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.813233 4784 scope.go:117] "RemoveContainer" containerID="a1ca080e06c90d50029e3e053bb842439e8a65280503413377d5f8cf766f9fa6" Dec 05 12:29:22 crc kubenswrapper[4784]: E1205 12:29:22.813519 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a1ca080e06c90d50029e3e053bb842439e8a65280503413377d5f8cf766f9fa6\": container with ID starting with a1ca080e06c90d50029e3e053bb842439e8a65280503413377d5f8cf766f9fa6 not found: ID does not exist" containerID="a1ca080e06c90d50029e3e053bb842439e8a65280503413377d5f8cf766f9fa6" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.813557 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a1ca080e06c90d50029e3e053bb842439e8a65280503413377d5f8cf766f9fa6"} err="failed to get container status \"a1ca080e06c90d50029e3e053bb842439e8a65280503413377d5f8cf766f9fa6\": rpc error: code = NotFound desc = could not find container \"a1ca080e06c90d50029e3e053bb842439e8a65280503413377d5f8cf766f9fa6\": container with ID starting with a1ca080e06c90d50029e3e053bb842439e8a65280503413377d5f8cf766f9fa6 not found: ID does not exist" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.813572 4784 scope.go:117] "RemoveContainer" containerID="864a6fa223f0bf8039b10ef73f169f800bc5f0c4b7da73faadd496f24b37e227" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.814099 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.841140 4784 scope.go:117] "RemoveContainer" containerID="864a6fa223f0bf8039b10ef73f169f800bc5f0c4b7da73faadd496f24b37e227" Dec 05 12:29:22 crc kubenswrapper[4784]: E1205 12:29:22.841560 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"864a6fa223f0bf8039b10ef73f169f800bc5f0c4b7da73faadd496f24b37e227\": container with ID starting with 864a6fa223f0bf8039b10ef73f169f800bc5f0c4b7da73faadd496f24b37e227 not found: ID does not exist" containerID="864a6fa223f0bf8039b10ef73f169f800bc5f0c4b7da73faadd496f24b37e227" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.841598 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"864a6fa223f0bf8039b10ef73f169f800bc5f0c4b7da73faadd496f24b37e227"} err="failed to get container status \"864a6fa223f0bf8039b10ef73f169f800bc5f0c4b7da73faadd496f24b37e227\": rpc error: code = NotFound desc = could not find container \"864a6fa223f0bf8039b10ef73f169f800bc5f0c4b7da73faadd496f24b37e227\": container with ID starting with 864a6fa223f0bf8039b10ef73f169f800bc5f0c4b7da73faadd496f24b37e227 not found: ID does not exist" Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.841625 4784 scope.go:117] "RemoveContainer" containerID="f7a5cf2484f7c19b4ba414fc32937ef7a46ee64751934d249ab88c46b822e19d" Dec 05 12:29:22 crc kubenswrapper[4784]: W1205 12:29:22.851820 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-343b0dd9ff2a2a23edef98c390b62bb14afa913f948c6265c527e9d716b7dadb WatchSource:0}: Error finding container 
343b0dd9ff2a2a23edef98c390b62bb14afa913f948c6265c527e9d716b7dadb: Status 404 returned error can't find the container with id 343b0dd9ff2a2a23edef98c390b62bb14afa913f948c6265c527e9d716b7dadb
Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.852083 4784 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/626019ff-24ba-4b81-b6ad-ba7c7085fa55-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.852152 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pxshj\" (UniqueName: \"kubernetes.io/projected/6d81f1d0-3e85-443a-a738-2e0d9302d327-kube-api-access-pxshj\") on node \"crc\" DevicePath \"\""
Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.852171 4784 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6d81f1d0-3e85-443a-a738-2e0d9302d327-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.852205 4784 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6d81f1d0-3e85-443a-a738-2e0d9302d327-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.855101 4784 scope.go:117] "RemoveContainer" containerID="3721ab85524db2d44e3225cf74decc2e9d23150f795de83e620f182c76b8c012"
Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.881025 4784 scope.go:117] "RemoveContainer" containerID="890a75c9b5d19011f1b69920373134a1e5a03653c203c7053b464e4fb7e0d306"
Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.893709 4784 scope.go:117] "RemoveContainer" containerID="f7a5cf2484f7c19b4ba414fc32937ef7a46ee64751934d249ab88c46b822e19d"
Dec 05 12:29:22 crc kubenswrapper[4784]: E1205 12:29:22.894075 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f7a5cf2484f7c19b4ba414fc32937ef7a46ee64751934d249ab88c46b822e19d\": container with ID starting with f7a5cf2484f7c19b4ba414fc32937ef7a46ee64751934d249ab88c46b822e19d not found: ID does not exist" containerID="f7a5cf2484f7c19b4ba414fc32937ef7a46ee64751934d249ab88c46b822e19d"
Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.894121 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f7a5cf2484f7c19b4ba414fc32937ef7a46ee64751934d249ab88c46b822e19d"} err="failed to get container status \"f7a5cf2484f7c19b4ba414fc32937ef7a46ee64751934d249ab88c46b822e19d\": rpc error: code = NotFound desc = could not find container \"f7a5cf2484f7c19b4ba414fc32937ef7a46ee64751934d249ab88c46b822e19d\": container with ID starting with f7a5cf2484f7c19b4ba414fc32937ef7a46ee64751934d249ab88c46b822e19d not found: ID does not exist"
Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.894164 4784 scope.go:117] "RemoveContainer" containerID="3721ab85524db2d44e3225cf74decc2e9d23150f795de83e620f182c76b8c012"
Dec 05 12:29:22 crc kubenswrapper[4784]: E1205 12:29:22.894676 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3721ab85524db2d44e3225cf74decc2e9d23150f795de83e620f182c76b8c012\": container with ID starting with 3721ab85524db2d44e3225cf74decc2e9d23150f795de83e620f182c76b8c012 not found: ID does not exist" containerID="3721ab85524db2d44e3225cf74decc2e9d23150f795de83e620f182c76b8c012"
Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.894711 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3721ab85524db2d44e3225cf74decc2e9d23150f795de83e620f182c76b8c012"} err="failed to get container status \"3721ab85524db2d44e3225cf74decc2e9d23150f795de83e620f182c76b8c012\": rpc error: code = NotFound desc = could not find container \"3721ab85524db2d44e3225cf74decc2e9d23150f795de83e620f182c76b8c012\": container with ID starting with 3721ab85524db2d44e3225cf74decc2e9d23150f795de83e620f182c76b8c012 not found: ID does not exist"
Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.894732 4784 scope.go:117] "RemoveContainer" containerID="890a75c9b5d19011f1b69920373134a1e5a03653c203c7053b464e4fb7e0d306"
Dec 05 12:29:22 crc kubenswrapper[4784]: E1205 12:29:22.895111 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"890a75c9b5d19011f1b69920373134a1e5a03653c203c7053b464e4fb7e0d306\": container with ID starting with 890a75c9b5d19011f1b69920373134a1e5a03653c203c7053b464e4fb7e0d306 not found: ID does not exist" containerID="890a75c9b5d19011f1b69920373134a1e5a03653c203c7053b464e4fb7e0d306"
Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.895147 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"890a75c9b5d19011f1b69920373134a1e5a03653c203c7053b464e4fb7e0d306"} err="failed to get container status \"890a75c9b5d19011f1b69920373134a1e5a03653c203c7053b464e4fb7e0d306\": rpc error: code = NotFound desc = could not find container \"890a75c9b5d19011f1b69920373134a1e5a03653c203c7053b464e4fb7e0d306\": container with ID starting with 890a75c9b5d19011f1b69920373134a1e5a03653c203c7053b464e4fb7e0d306 not found: ID does not exist"
Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.955962 4784 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.956304 4784 status_manager.go:851] "Failed to get status for pod" podUID="f6a64333-7a87-462a-996f-b3ce85e43c8f" pod="openshift-marketplace/community-operators-pd864" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-pd864\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.956534 4784 status_manager.go:851] "Failed to get status for pod" podUID="390fc8b7-86b2-4ecc-a41f-1fffd11b1a22" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.956783 4784 status_manager.go:851] "Failed to get status for pod" podUID="e350bf27-d60f-4f5f-9bc0-460e997fed0c" pod="openshift-marketplace/marketplace-operator-79b997595-txh4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-txh4x\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.957047 4784 status_manager.go:851] "Failed to get status for pod" podUID="626019ff-24ba-4b81-b6ad-ba7c7085fa55" pod="openshift-marketplace/redhat-operators-vlvmb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-vlvmb\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.957512 4784 status_manager.go:851] "Failed to get status for pod" podUID="6d81f1d0-3e85-443a-a738-2e0d9302d327" pod="openshift-marketplace/redhat-marketplace-hkkm9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-hkkm9\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.957804 4784 status_manager.go:851] "Failed to get status for pod" podUID="be2166c8-c56d-46bd-ac93-e6eeb11ecba6" pod="openshift-marketplace/certified-operators-sf2pd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-sf2pd\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.958128 4784 status_manager.go:851] "Failed to get status for pod" podUID="6d81f1d0-3e85-443a-a738-2e0d9302d327" pod="openshift-marketplace/redhat-marketplace-hkkm9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-hkkm9\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.958414 4784 status_manager.go:851] "Failed to get status for pod" podUID="be2166c8-c56d-46bd-ac93-e6eeb11ecba6" pod="openshift-marketplace/certified-operators-sf2pd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-sf2pd\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.958639 4784 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.958830 4784 status_manager.go:851] "Failed to get status for pod" podUID="f6a64333-7a87-462a-996f-b3ce85e43c8f" pod="openshift-marketplace/community-operators-pd864" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-pd864\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.959030 4784 status_manager.go:851] "Failed to get status for pod" podUID="390fc8b7-86b2-4ecc-a41f-1fffd11b1a22" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.959235 4784 status_manager.go:851] "Failed to get status for pod" podUID="e350bf27-d60f-4f5f-9bc0-460e997fed0c" pod="openshift-marketplace/marketplace-operator-79b997595-txh4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-txh4x\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.959635 4784 status_manager.go:851] "Failed to get status for pod" podUID="626019ff-24ba-4b81-b6ad-ba7c7085fa55" pod="openshift-marketplace/redhat-operators-vlvmb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-vlvmb\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.959910 4784 status_manager.go:851] "Failed to get status for pod" podUID="f6a64333-7a87-462a-996f-b3ce85e43c8f" pod="openshift-marketplace/community-operators-pd864" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-pd864\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.960158 4784 status_manager.go:851] "Failed to get status for pod" podUID="390fc8b7-86b2-4ecc-a41f-1fffd11b1a22" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.960445 4784 status_manager.go:851] "Failed to get status for pod" podUID="626019ff-24ba-4b81-b6ad-ba7c7085fa55" pod="openshift-marketplace/redhat-operators-vlvmb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-vlvmb\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.960735 4784 status_manager.go:851] "Failed to get status for pod" podUID="e350bf27-d60f-4f5f-9bc0-460e997fed0c" pod="openshift-marketplace/marketplace-operator-79b997595-txh4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-txh4x\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.960979 4784 status_manager.go:851] "Failed to get status for pod" podUID="6d81f1d0-3e85-443a-a738-2e0d9302d327" pod="openshift-marketplace/redhat-marketplace-hkkm9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-hkkm9\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.961257 4784 status_manager.go:851] "Failed to get status for pod" podUID="be2166c8-c56d-46bd-ac93-e6eeb11ecba6" pod="openshift-marketplace/certified-operators-sf2pd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-sf2pd\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:22 crc kubenswrapper[4784]: I1205 12:29:22.961529 4784 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:23 crc kubenswrapper[4784]: I1205 12:29:23.651225 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log"
Dec 05 12:29:23 crc kubenswrapper[4784]: I1205 12:29:23.653658 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"1aab79e45ce0d338db945a1de844d1aec110d3bcdf6dfc35455ca82398492c06"}
Dec 05 12:29:23 crc kubenswrapper[4784]: I1205 12:29:23.653707 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"343b0dd9ff2a2a23edef98c390b62bb14afa913f948c6265c527e9d716b7dadb"}
Dec 05 12:29:23 crc kubenswrapper[4784]: I1205 12:29:23.653744 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hkkm9"
Dec 05 12:29:23 crc kubenswrapper[4784]: I1205 12:29:23.653891 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-kvfdl"
Dec 05 12:29:23 crc kubenswrapper[4784]: I1205 12:29:23.654695 4784 status_manager.go:851] "Failed to get status for pod" podUID="6d81f1d0-3e85-443a-a738-2e0d9302d327" pod="openshift-marketplace/redhat-marketplace-hkkm9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-hkkm9\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:23 crc kubenswrapper[4784]: I1205 12:29:23.654873 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-kvfdl"
Dec 05 12:29:23 crc kubenswrapper[4784]: I1205 12:29:23.654868 4784 status_manager.go:851] "Failed to get status for pod" podUID="be2166c8-c56d-46bd-ac93-e6eeb11ecba6" pod="openshift-marketplace/certified-operators-sf2pd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-sf2pd\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:23 crc kubenswrapper[4784]: I1205 12:29:23.655376 4784 status_manager.go:851] "Failed to get status for pod" podUID="390fc8b7-86b2-4ecc-a41f-1fffd11b1a22" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:23 crc kubenswrapper[4784]: I1205 12:29:23.655648 4784 status_manager.go:851] "Failed to get status for pod" podUID="f6a64333-7a87-462a-996f-b3ce85e43c8f" pod="openshift-marketplace/community-operators-pd864" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-pd864\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:23 crc kubenswrapper[4784]: I1205 12:29:23.655918 4784 status_manager.go:851] "Failed to get status for pod" podUID="626019ff-24ba-4b81-b6ad-ba7c7085fa55" pod="openshift-marketplace/redhat-operators-vlvmb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-vlvmb\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:23 crc kubenswrapper[4784]: I1205 12:29:23.656102 4784 status_manager.go:851] "Failed to get status for pod" podUID="e350bf27-d60f-4f5f-9bc0-460e997fed0c" pod="openshift-marketplace/marketplace-operator-79b997595-txh4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-txh4x\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:23 crc kubenswrapper[4784]: I1205 12:29:23.656341 4784 status_manager.go:851] "Failed to get status for pod" podUID="be2166c8-c56d-46bd-ac93-e6eeb11ecba6" pod="openshift-marketplace/certified-operators-sf2pd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-sf2pd\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:23 crc kubenswrapper[4784]: I1205 12:29:23.656555 4784 status_manager.go:851] "Failed to get status for pod" podUID="f6a64333-7a87-462a-996f-b3ce85e43c8f" pod="openshift-marketplace/community-operators-pd864" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-pd864\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:23 crc kubenswrapper[4784]: I1205 12:29:23.656725 4784 status_manager.go:851] "Failed to get status for pod" podUID="390fc8b7-86b2-4ecc-a41f-1fffd11b1a22" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:23 crc kubenswrapper[4784]: I1205 12:29:23.656883 4784 status_manager.go:851] "Failed to get status for pod" podUID="e350bf27-d60f-4f5f-9bc0-460e997fed0c" pod="openshift-marketplace/marketplace-operator-79b997595-txh4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-txh4x\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:23 crc kubenswrapper[4784]: I1205 12:29:23.657039 4784 status_manager.go:851] "Failed to get status for pod" podUID="626019ff-24ba-4b81-b6ad-ba7c7085fa55" pod="openshift-marketplace/redhat-operators-vlvmb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-vlvmb\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:23 crc kubenswrapper[4784]: I1205 12:29:23.657222 4784 status_manager.go:851] "Failed to get status for pod" podUID="6d81f1d0-3e85-443a-a738-2e0d9302d327" pod="openshift-marketplace/redhat-marketplace-hkkm9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-hkkm9\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:23 crc kubenswrapper[4784]: I1205 12:29:23.660450 4784 status_manager.go:851] "Failed to get status for pod" podUID="6d81f1d0-3e85-443a-a738-2e0d9302d327" pod="openshift-marketplace/redhat-marketplace-hkkm9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-hkkm9\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:23 crc kubenswrapper[4784]: E1205 12:29:23.660551 4784 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.223:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 12:29:23 crc kubenswrapper[4784]: I1205 12:29:23.660678 4784 status_manager.go:851] "Failed to get status for pod" podUID="be2166c8-c56d-46bd-ac93-e6eeb11ecba6" pod="openshift-marketplace/certified-operators-sf2pd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-sf2pd\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:23 crc kubenswrapper[4784]: I1205 12:29:23.660870 4784 status_manager.go:851] "Failed to get status for pod" podUID="390fc8b7-86b2-4ecc-a41f-1fffd11b1a22" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:23 crc kubenswrapper[4784]: I1205 12:29:23.661159 4784 status_manager.go:851] "Failed to get status for pod" podUID="f6a64333-7a87-462a-996f-b3ce85e43c8f" pod="openshift-marketplace/community-operators-pd864" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-pd864\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:23 crc kubenswrapper[4784]: I1205 12:29:23.662109 4784 status_manager.go:851] "Failed to get status for pod" podUID="626019ff-24ba-4b81-b6ad-ba7c7085fa55" pod="openshift-marketplace/redhat-operators-vlvmb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-vlvmb\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:23 crc kubenswrapper[4784]: I1205 12:29:23.662422 4784 status_manager.go:851] "Failed to get status for pod" podUID="e350bf27-d60f-4f5f-9bc0-460e997fed0c" pod="openshift-marketplace/marketplace-operator-79b997595-txh4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-txh4x\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:23 crc kubenswrapper[4784]: I1205 12:29:23.882038 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 12:29:23 crc kubenswrapper[4784]: I1205 12:29:23.882760 4784 status_manager.go:851] "Failed to get status for pod" podUID="390fc8b7-86b2-4ecc-a41f-1fffd11b1a22" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:23 crc kubenswrapper[4784]: I1205 12:29:23.883256 4784 status_manager.go:851] "Failed to get status for pod" podUID="f6a64333-7a87-462a-996f-b3ce85e43c8f" pod="openshift-marketplace/community-operators-pd864" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-pd864\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:23 crc kubenswrapper[4784]: I1205 12:29:23.883625 4784 status_manager.go:851] "Failed to get status for pod" podUID="626019ff-24ba-4b81-b6ad-ba7c7085fa55" pod="openshift-marketplace/redhat-operators-vlvmb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-vlvmb\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:23 crc kubenswrapper[4784]: I1205 12:29:23.884094 4784 status_manager.go:851] "Failed to get status for pod" podUID="e350bf27-d60f-4f5f-9bc0-460e997fed0c" pod="openshift-marketplace/marketplace-operator-79b997595-txh4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-txh4x\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:23 crc kubenswrapper[4784]: I1205 12:29:23.884404 4784 status_manager.go:851] "Failed to get status for pod" podUID="6d81f1d0-3e85-443a-a738-2e0d9302d327" pod="openshift-marketplace/redhat-marketplace-hkkm9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-hkkm9\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:23 crc kubenswrapper[4784]: I1205 12:29:23.884694 4784 status_manager.go:851] "Failed to get status for pod" podUID="be2166c8-c56d-46bd-ac93-e6eeb11ecba6" pod="openshift-marketplace/certified-operators-sf2pd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-sf2pd\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:23 crc kubenswrapper[4784]: I1205 12:29:23.965075 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/390fc8b7-86b2-4ecc-a41f-1fffd11b1a22-var-lock\") pod \"390fc8b7-86b2-4ecc-a41f-1fffd11b1a22\" (UID: \"390fc8b7-86b2-4ecc-a41f-1fffd11b1a22\") "
Dec 05 12:29:23 crc kubenswrapper[4784]: I1205 12:29:23.965153 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/390fc8b7-86b2-4ecc-a41f-1fffd11b1a22-kubelet-dir\") pod \"390fc8b7-86b2-4ecc-a41f-1fffd11b1a22\" (UID: \"390fc8b7-86b2-4ecc-a41f-1fffd11b1a22\") "
Dec 05 12:29:23 crc kubenswrapper[4784]: I1205 12:29:23.965177 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/390fc8b7-86b2-4ecc-a41f-1fffd11b1a22-var-lock" (OuterVolumeSpecName: "var-lock") pod "390fc8b7-86b2-4ecc-a41f-1fffd11b1a22" (UID: "390fc8b7-86b2-4ecc-a41f-1fffd11b1a22"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 12:29:23 crc kubenswrapper[4784]: I1205 12:29:23.965247 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/390fc8b7-86b2-4ecc-a41f-1fffd11b1a22-kube-api-access\") pod \"390fc8b7-86b2-4ecc-a41f-1fffd11b1a22\" (UID: \"390fc8b7-86b2-4ecc-a41f-1fffd11b1a22\") "
Dec 05 12:29:23 crc kubenswrapper[4784]: I1205 12:29:23.965312 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/390fc8b7-86b2-4ecc-a41f-1fffd11b1a22-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "390fc8b7-86b2-4ecc-a41f-1fffd11b1a22" (UID: "390fc8b7-86b2-4ecc-a41f-1fffd11b1a22"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 12:29:23 crc kubenswrapper[4784]: I1205 12:29:23.965688 4784 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/390fc8b7-86b2-4ecc-a41f-1fffd11b1a22-var-lock\") on node \"crc\" DevicePath \"\""
Dec 05 12:29:23 crc kubenswrapper[4784]: I1205 12:29:23.965705 4784 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/390fc8b7-86b2-4ecc-a41f-1fffd11b1a22-kubelet-dir\") on node \"crc\" DevicePath \"\""
Dec 05 12:29:23 crc kubenswrapper[4784]: I1205 12:29:23.970570 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/390fc8b7-86b2-4ecc-a41f-1fffd11b1a22-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "390fc8b7-86b2-4ecc-a41f-1fffd11b1a22" (UID: "390fc8b7-86b2-4ecc-a41f-1fffd11b1a22"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:29:24 crc kubenswrapper[4784]: I1205 12:29:24.067286 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/390fc8b7-86b2-4ecc-a41f-1fffd11b1a22-kube-api-access\") on node \"crc\" DevicePath \"\""
Dec 05 12:29:24 crc kubenswrapper[4784]: E1205 12:29:24.236251 4784 log.go:32] "RunPodSandbox from runtime service failed" err=<
Dec 05 12:29:24 crc kubenswrapper[4784]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-kvfdl_openshift-marketplace_4825dcfb-cf17-4a0d-b4f2-4f46c87beccb_0(5dcfe2efe837a6c836fe8add17269e32ac2f45b5c3abaa5a3b2fc7744426f506): error adding pod openshift-marketplace_marketplace-operator-79b997595-kvfdl to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"5dcfe2efe837a6c836fe8add17269e32ac2f45b5c3abaa5a3b2fc7744426f506" Netns:"/var/run/netns/d097ec6d-16b2-485c-b6de-8a513b9239de" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-kvfdl;K8S_POD_INFRA_CONTAINER_ID=5dcfe2efe837a6c836fe8add17269e32ac2f45b5c3abaa5a3b2fc7744426f506;K8S_POD_UID=4825dcfb-cf17-4a0d-b4f2-4f46c87beccb" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-kvfdl] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-kvfdl/4825dcfb-cf17-4a0d-b4f2-4f46c87beccb]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-kvfdl in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-kvfdl in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-kvfdl?timeout=1m0s": dial tcp 38.102.83.223:6443: connect: connection refused
Dec 05 12:29:24 crc kubenswrapper[4784]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"}
Dec 05 12:29:24 crc kubenswrapper[4784]: >
Dec 05 12:29:24 crc kubenswrapper[4784]: E1205 12:29:24.236577 4784 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err=<
Dec 05 12:29:24 crc kubenswrapper[4784]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-kvfdl_openshift-marketplace_4825dcfb-cf17-4a0d-b4f2-4f46c87beccb_0(5dcfe2efe837a6c836fe8add17269e32ac2f45b5c3abaa5a3b2fc7744426f506): error adding pod openshift-marketplace_marketplace-operator-79b997595-kvfdl to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"5dcfe2efe837a6c836fe8add17269e32ac2f45b5c3abaa5a3b2fc7744426f506" Netns:"/var/run/netns/d097ec6d-16b2-485c-b6de-8a513b9239de" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-kvfdl;K8S_POD_INFRA_CONTAINER_ID=5dcfe2efe837a6c836fe8add17269e32ac2f45b5c3abaa5a3b2fc7744426f506;K8S_POD_UID=4825dcfb-cf17-4a0d-b4f2-4f46c87beccb" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-kvfdl] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-kvfdl/4825dcfb-cf17-4a0d-b4f2-4f46c87beccb]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-kvfdl in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-kvfdl in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-kvfdl?timeout=1m0s": dial tcp 38.102.83.223:6443: connect: connection refused
Dec 05 12:29:24 crc kubenswrapper[4784]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"}
Dec 05 12:29:24 crc kubenswrapper[4784]: > pod="openshift-marketplace/marketplace-operator-79b997595-kvfdl"
Dec 05 12:29:24 crc kubenswrapper[4784]: E1205 12:29:24.236607 4784 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err=<
Dec 05 12:29:24 crc kubenswrapper[4784]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-kvfdl_openshift-marketplace_4825dcfb-cf17-4a0d-b4f2-4f46c87beccb_0(5dcfe2efe837a6c836fe8add17269e32ac2f45b5c3abaa5a3b2fc7744426f506): error adding pod openshift-marketplace_marketplace-operator-79b997595-kvfdl to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"5dcfe2efe837a6c836fe8add17269e32ac2f45b5c3abaa5a3b2fc7744426f506" Netns:"/var/run/netns/d097ec6d-16b2-485c-b6de-8a513b9239de" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-kvfdl;K8S_POD_INFRA_CONTAINER_ID=5dcfe2efe837a6c836fe8add17269e32ac2f45b5c3abaa5a3b2fc7744426f506;K8S_POD_UID=4825dcfb-cf17-4a0d-b4f2-4f46c87beccb" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-kvfdl] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-kvfdl/4825dcfb-cf17-4a0d-b4f2-4f46c87beccb]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-kvfdl in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-kvfdl in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-kvfdl?timeout=1m0s": dial tcp 38.102.83.223:6443: connect: connection refused
Dec 05 12:29:24 crc kubenswrapper[4784]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"}
Dec 05 12:29:24 crc kubenswrapper[4784]: > pod="openshift-marketplace/marketplace-operator-79b997595-kvfdl"
Dec 05 12:29:24 crc kubenswrapper[4784]: E1205 12:29:24.236908 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"marketplace-operator-79b997595-kvfdl_openshift-marketplace(4825dcfb-cf17-4a0d-b4f2-4f46c87beccb)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"marketplace-operator-79b997595-kvfdl_openshift-marketplace(4825dcfb-cf17-4a0d-b4f2-4f46c87beccb)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-kvfdl_openshift-marketplace_4825dcfb-cf17-4a0d-b4f2-4f46c87beccb_0(5dcfe2efe837a6c836fe8add17269e32ac2f45b5c3abaa5a3b2fc7744426f506): error adding pod openshift-marketplace_marketplace-operator-79b997595-kvfdl to CNI network \\\"multus-cni-network\\\": plugin type=\\\"multus-shim\\\" name=\\\"multus-cni-network\\\" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:\\\"5dcfe2efe837a6c836fe8add17269e32ac2f45b5c3abaa5a3b2fc7744426f506\\\" Netns:\\\"/var/run/netns/d097ec6d-16b2-485c-b6de-8a513b9239de\\\" IfName:\\\"eth0\\\" Args:\\\"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-kvfdl;K8S_POD_INFRA_CONTAINER_ID=5dcfe2efe837a6c836fe8add17269e32ac2f45b5c3abaa5a3b2fc7744426f506;K8S_POD_UID=4825dcfb-cf17-4a0d-b4f2-4f46c87beccb\\\" Path:\\\"\\\" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-kvfdl] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-kvfdl/4825dcfb-cf17-4a0d-b4f2-4f46c87beccb]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-kvfdl in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-kvfdl in out of cluster comm: status update failed for pod /: Get \\\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-kvfdl?timeout=1m0s\\\": dial tcp 38.102.83.223:6443: connect: connection refused\\n': StdinData: {\\\"binDir\\\":\\\"/var/lib/cni/bin\\\",\\\"clusterNetwork\\\":\\\"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf\\\",\\\"cniVersion\\\":\\\"0.3.1\\\",\\\"daemonSocketDir\\\":\\\"/run/multus/socket\\\",\\\"globalNamespaces\\\":\\\"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv\\\",\\\"logLevel\\\":\\\"verbose\\\",\\\"logToStderr\\\":true,\\\"name\\\":\\\"multus-cni-network\\\",\\\"namespaceIsolation\\\":true,\\\"type\\\":\\\"multus-shim\\\"}\"" pod="openshift-marketplace/marketplace-operator-79b997595-kvfdl" podUID="4825dcfb-cf17-4a0d-b4f2-4f46c87beccb"
Dec 05 12:29:24 crc kubenswrapper[4784]: I1205 12:29:24.660322 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"390fc8b7-86b2-4ecc-a41f-1fffd11b1a22","Type":"ContainerDied","Data":"6985e4d79c09675da823ff1033185655fb81093533f38b062707729841bd3ece"}
Dec 05 12:29:24 crc kubenswrapper[4784]: I1205 12:29:24.660597 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6985e4d79c09675da823ff1033185655fb81093533f38b062707729841bd3ece"
Dec 05 12:29:24 crc kubenswrapper[4784]: I1205 12:29:24.660373 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 12:29:24 crc kubenswrapper[4784]: I1205 12:29:24.700465 4784 status_manager.go:851] "Failed to get status for pod" podUID="f6a64333-7a87-462a-996f-b3ce85e43c8f" pod="openshift-marketplace/community-operators-pd864" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-pd864\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:24 crc kubenswrapper[4784]: I1205 12:29:24.700669 4784 status_manager.go:851] "Failed to get status for pod" podUID="390fc8b7-86b2-4ecc-a41f-1fffd11b1a22" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:24 crc kubenswrapper[4784]: I1205 12:29:24.700908 4784 status_manager.go:851] "Failed to get status for pod" podUID="626019ff-24ba-4b81-b6ad-ba7c7085fa55" pod="openshift-marketplace/redhat-operators-vlvmb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-vlvmb\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:24 crc kubenswrapper[4784]: I1205 12:29:24.701101 4784 status_manager.go:851] "Failed to get status for pod" podUID="e350bf27-d60f-4f5f-9bc0-460e997fed0c" pod="openshift-marketplace/marketplace-operator-79b997595-txh4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-txh4x\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:24 crc kubenswrapper[4784]: I1205 12:29:24.701473 4784 status_manager.go:851] "Failed to get status for pod" podUID="6d81f1d0-3e85-443a-a738-2e0d9302d327" pod="openshift-marketplace/redhat-marketplace-hkkm9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-hkkm9\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:24 crc kubenswrapper[4784]: I1205 12:29:24.701893 4784 status_manager.go:851] "Failed to get status for pod" podUID="be2166c8-c56d-46bd-ac93-e6eeb11ecba6" pod="openshift-marketplace/certified-operators-sf2pd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-sf2pd\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:24 crc kubenswrapper[4784]: I1205 12:29:24.702911 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log"
Dec 05 12:29:24 crc kubenswrapper[4784]: I1205 12:29:24.703549 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 12:29:24 crc kubenswrapper[4784]: I1205 12:29:24.703889 4784 status_manager.go:851] "Failed to get status for pod" podUID="390fc8b7-86b2-4ecc-a41f-1fffd11b1a22" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:24 crc kubenswrapper[4784]: I1205 12:29:24.704231 4784 status_manager.go:851] "Failed to get status for pod" podUID="f6a64333-7a87-462a-996f-b3ce85e43c8f" pod="openshift-marketplace/community-operators-pd864" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-pd864\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:24 crc kubenswrapper[4784]: I1205 12:29:24.704517 4784 status_manager.go:851] "Failed to get status for pod" podUID="626019ff-24ba-4b81-b6ad-ba7c7085fa55" pod="openshift-marketplace/redhat-operators-vlvmb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-vlvmb\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:24 crc kubenswrapper[4784]: I1205 12:29:24.704908 4784 status_manager.go:851] "Failed to get status for pod" podUID="e350bf27-d60f-4f5f-9bc0-460e997fed0c" pod="openshift-marketplace/marketplace-operator-79b997595-txh4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-txh4x\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:24 crc kubenswrapper[4784]: I1205 12:29:24.705239 4784 status_manager.go:851] "Failed to get status for pod" podUID="6d81f1d0-3e85-443a-a738-2e0d9302d327" pod="openshift-marketplace/redhat-marketplace-hkkm9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-hkkm9\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:24 crc kubenswrapper[4784]: I1205 12:29:24.705460 4784 status_manager.go:851] "Failed to get status for pod" podUID="be2166c8-c56d-46bd-ac93-e6eeb11ecba6" pod="openshift-marketplace/certified-operators-sf2pd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-sf2pd\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:24 crc kubenswrapper[4784]: I1205 12:29:24.705689 4784 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:24 crc kubenswrapper[4784]: I1205 12:29:24.774813 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") "
Dec 05 12:29:24 crc kubenswrapper[4784]: I1205 12:29:24.774886 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") "
Dec 05 12:29:24 crc kubenswrapper[4784]: I1205 12:29:24.774900 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 12:29:24 crc kubenswrapper[4784]: I1205 12:29:24.774938 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") "
Dec 05 12:29:24 crc kubenswrapper[4784]: I1205 12:29:24.775011 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 12:29:24 crc kubenswrapper[4784]: I1205 12:29:24.775104 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 12:29:24 crc kubenswrapper[4784]: I1205 12:29:24.775220 4784 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\""
Dec 05 12:29:24 crc kubenswrapper[4784]: I1205 12:29:24.775235 4784 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\""
Dec 05 12:29:24 crc kubenswrapper[4784]: I1205 12:29:24.775243 4784 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\""
Dec 05 12:29:25 crc kubenswrapper[4784]: I1205 12:29:25.005564 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes"
Dec 05 12:29:25 crc kubenswrapper[4784]: I1205 12:29:25.674612 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log"
Dec 05 12:29:25 crc kubenswrapper[4784]: I1205 12:29:25.675452 4784 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7" exitCode=0
Dec 05 12:29:25 crc kubenswrapper[4784]: I1205 12:29:25.675522 4784 scope.go:117] "RemoveContainer" containerID="a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e"
Dec 05 12:29:25 crc kubenswrapper[4784]: I1205 12:29:25.675542 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 12:29:25 crc kubenswrapper[4784]: I1205 12:29:25.676376 4784 status_manager.go:851] "Failed to get status for pod" podUID="f6a64333-7a87-462a-996f-b3ce85e43c8f" pod="openshift-marketplace/community-operators-pd864" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-pd864\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:25 crc kubenswrapper[4784]: I1205 12:29:25.676706 4784 status_manager.go:851] "Failed to get status for pod" podUID="390fc8b7-86b2-4ecc-a41f-1fffd11b1a22" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:25 crc kubenswrapper[4784]: I1205 12:29:25.676892 4784 status_manager.go:851] "Failed to get status for pod" podUID="626019ff-24ba-4b81-b6ad-ba7c7085fa55" pod="openshift-marketplace/redhat-operators-vlvmb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-vlvmb\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:25 crc kubenswrapper[4784]: I1205 12:29:25.678330 4784 status_manager.go:851] "Failed to get status for pod" podUID="e350bf27-d60f-4f5f-9bc0-460e997fed0c" pod="openshift-marketplace/marketplace-operator-79b997595-txh4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-txh4x\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:25 crc kubenswrapper[4784]: I1205 12:29:25.678794 4784 status_manager.go:851] "Failed to get status for pod" podUID="6d81f1d0-3e85-443a-a738-2e0d9302d327" pod="openshift-marketplace/redhat-marketplace-hkkm9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-hkkm9\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:25 crc kubenswrapper[4784]: I1205 12:29:25.679021 4784 status_manager.go:851] "Failed to get status for pod" podUID="be2166c8-c56d-46bd-ac93-e6eeb11ecba6" pod="openshift-marketplace/certified-operators-sf2pd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-sf2pd\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:25 crc kubenswrapper[4784]: I1205 12:29:25.679209 4784 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:25 crc kubenswrapper[4784]: I1205 12:29:25.679484 4784 status_manager.go:851] "Failed to get status for pod" podUID="626019ff-24ba-4b81-b6ad-ba7c7085fa55" pod="openshift-marketplace/redhat-operators-vlvmb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-vlvmb\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:25 crc kubenswrapper[4784]: I1205 12:29:25.679649 4784 status_manager.go:851] "Failed to get status for pod" podUID="e350bf27-d60f-4f5f-9bc0-460e997fed0c" pod="openshift-marketplace/marketplace-operator-79b997595-txh4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-txh4x\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:25 crc kubenswrapper[4784]: I1205 12:29:25.679843 4784 status_manager.go:851] "Failed to get status for pod" podUID="6d81f1d0-3e85-443a-a738-2e0d9302d327" pod="openshift-marketplace/redhat-marketplace-hkkm9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-hkkm9\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:25 crc kubenswrapper[4784]: I1205 12:29:25.680009 4784 status_manager.go:851] "Failed to get status for pod" podUID="be2166c8-c56d-46bd-ac93-e6eeb11ecba6" pod="openshift-marketplace/certified-operators-sf2pd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-sf2pd\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:25 crc kubenswrapper[4784]: I1205 12:29:25.680224 4784 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:25 crc kubenswrapper[4784]: I1205 12:29:25.680442 4784 status_manager.go:851] "Failed to get status for pod" podUID="f6a64333-7a87-462a-996f-b3ce85e43c8f" pod="openshift-marketplace/community-operators-pd864" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-pd864\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:25 crc kubenswrapper[4784]: I1205 12:29:25.680814 4784 status_manager.go:851] "Failed to get status for pod" podUID="390fc8b7-86b2-4ecc-a41f-1fffd11b1a22" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:25 crc kubenswrapper[4784]: I1205 12:29:25.694165 4784 scope.go:117] "RemoveContainer" containerID="c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505"
Dec 05 12:29:25 crc kubenswrapper[4784]: I1205 12:29:25.705883 4784 scope.go:117] "RemoveContainer" containerID="ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13"
Dec 05 12:29:25 crc kubenswrapper[4784]: I1205 12:29:25.720327 4784 scope.go:117] "RemoveContainer" containerID="6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89"
Dec 05 12:29:25 crc kubenswrapper[4784]: I1205 12:29:25.735650 4784 scope.go:117] "RemoveContainer" containerID="b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7"
Dec 05 12:29:25 crc kubenswrapper[4784]: I1205 12:29:25.751762 4784 scope.go:117] "RemoveContainer" containerID="1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72"
Dec 05 12:29:25 crc kubenswrapper[4784]: I1205 12:29:25.769469 4784 scope.go:117] "RemoveContainer" containerID="a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e"
Dec 05 12:29:25 crc kubenswrapper[4784]: E1205 12:29:25.769919 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e\": container with ID starting with a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e not found: ID does not exist" containerID="a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e"
Dec 05 12:29:25 crc kubenswrapper[4784]: I1205 12:29:25.769988 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e"} err="failed to get container status \"a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e\": rpc error: code = NotFound desc = could not find container \"a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e\": container with ID starting with a1315abec8017870aa470f7fc9919d3ac9722ab273567e0e12baa8405ede002e not found: ID does not exist"
Dec 05 12:29:25 crc kubenswrapper[4784]: I1205 12:29:25.770077 4784 scope.go:117] "RemoveContainer" containerID="c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505"
Dec 05 12:29:25 crc kubenswrapper[4784]: E1205 12:29:25.770782 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\": container with ID starting with c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505 not found: ID does not exist" containerID="c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505"
Dec 05 12:29:25 crc kubenswrapper[4784]: I1205 12:29:25.770809 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505"} err="failed to get container status \"c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\": rpc error: code = NotFound desc = could not find container \"c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505\": container with ID starting with c801f273c3e86fed6eeffa1d1d998a5c820f0d45767482fd2c8681df66082505 not found: ID does not exist"
Dec 05 12:29:25 crc kubenswrapper[4784]: I1205 12:29:25.770827 4784 scope.go:117] "RemoveContainer" containerID="ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13"
Dec 05 12:29:25 crc kubenswrapper[4784]: E1205 12:29:25.771082 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\": container with ID starting with ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13 not found: ID does not exist" containerID="ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13"
Dec 05 12:29:25 crc kubenswrapper[4784]: I1205 12:29:25.771103 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13"} err="failed to get container status \"ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\": rpc error: code = NotFound desc = could not find container \"ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13\": container with ID starting with ec6963791239485587bfde866601e5747e0624923c4acd5ce882ffed48b76d13 not found: ID does not exist"
Dec 05 12:29:25 crc kubenswrapper[4784]: I1205 12:29:25.771116 4784 scope.go:117] "RemoveContainer" containerID="6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89"
Dec 05 12:29:25 crc kubenswrapper[4784]: E1205 12:29:25.773494 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\": container with ID starting with 6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89 not found: ID does not exist" containerID="6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89"
Dec 05 12:29:25 crc kubenswrapper[4784]: I1205 12:29:25.773546 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89"} err="failed to get container status \"6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\": rpc error: code = NotFound desc = could not find container \"6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89\": container with ID starting with 6a40010818ed72477fc079dd1f9618323ed81bf5a797c2bbed14477a0aa41c89 not found: ID does not exist"
Dec 05 12:29:25 crc kubenswrapper[4784]: I1205 12:29:25.773567 4784 scope.go:117] "RemoveContainer" containerID="b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7"
Dec 05 12:29:25 crc kubenswrapper[4784]: E1205 12:29:25.773846 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\": container with ID starting with b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7 not found: ID does not exist" containerID="b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7"
Dec 05 12:29:25 crc kubenswrapper[4784]: I1205 12:29:25.773882 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7"} err="failed to get container status \"b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\": rpc error: code = NotFound desc = could not find container \"b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7\": container with ID starting with b67b8f6c44f6b3c2a4388d1644196db2f49f125c21ffac278b1217b260230eb7 not found: ID does not exist"
Dec 05 12:29:25 crc kubenswrapper[4784]: I1205 12:29:25.773903 4784 scope.go:117] "RemoveContainer" containerID="1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72"
Dec 05 12:29:25 crc kubenswrapper[4784]: E1205 12:29:25.774223 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\": container with ID starting with 1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72 not found: ID does not exist" containerID="1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72"
Dec 05 12:29:25 crc kubenswrapper[4784]: I1205 12:29:25.774243 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72"} err="failed to get container status \"1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\": rpc error: code = NotFound desc = could not find container \"1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72\": container with ID starting with 1a9d5bf0b8f70b88fa3e223d97875f5b7d8dafe2e4077c67f1943a3a331bba72 not found: ID does not exist"
Dec 05 12:29:25 crc kubenswrapper[4784]: E1205 12:29:25.833072 4784 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/events\": dial tcp 38.102.83.223:6443: connect: connection refused" event=<
Dec 05 12:29:25 crc kubenswrapper[4784]: &Event{ObjectMeta:{marketplace-operator-79b997595-kvfdl.187e518b52279e9e openshift-marketplace 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-marketplace,Name:marketplace-operator-79b997595-kvfdl,UID:4825dcfb-cf17-4a0d-b4f2-4f46c87beccb,APIVersion:v1,ResourceVersion:29402,FieldPath:,},Reason:FailedCreatePodSandBox,Message:Failed to create pod sandbox: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-kvfdl_openshift-marketplace_4825dcfb-cf17-4a0d-b4f2-4f46c87beccb_0(a7b212dc0d75a287b961f7a0ff821d5ea7980a09993505a9d4a341279dc7dc10): error adding pod openshift-marketplace_marketplace-operator-79b997595-kvfdl to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"a7b212dc0d75a287b961f7a0ff821d5ea7980a09993505a9d4a341279dc7dc10" Netns:"/var/run/netns/4ffd8df7-96ec-40e6-97cb-cc796a382b28" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-kvfdl;K8S_POD_INFRA_CONTAINER_ID=a7b212dc0d75a287b961f7a0ff821d5ea7980a09993505a9d4a341279dc7dc10;K8S_POD_UID=4825dcfb-cf17-4a0d-b4f2-4f46c87beccb" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-kvfdl] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-kvfdl/4825dcfb-cf17-4a0d-b4f2-4f46c87beccb]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-kvfdl in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-kvfdl in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-kvfdl?timeout=1m0s": dial tcp 38.102.83.223:6443: connect: connection refused
Dec 05 12:29:25 crc kubenswrapper[4784]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"},Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 12:29:22.796445342 +0000 UTC m=+242.216512157,LastTimestamp:2025-12-05 12:29:22.796445342 +0000 UTC m=+242.216512157,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}
Dec 05 12:29:25 crc kubenswrapper[4784]: >
Dec 05 12:29:26 crc kubenswrapper[4784]: E1205 12:29:26.451103 4784 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:26 crc kubenswrapper[4784]: E1205 12:29:26.451416 4784 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:26 crc kubenswrapper[4784]: E1205 12:29:26.451590 4784 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:26 crc kubenswrapper[4784]: E1205 12:29:26.451761 4784 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:26 crc kubenswrapper[4784]: E1205 12:29:26.451904 4784 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:26 crc kubenswrapper[4784]: I1205 12:29:26.451921 4784 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease"
Dec 05 12:29:26 crc kubenswrapper[4784]: E1205 12:29:26.452058 4784 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.223:6443: connect: connection refused" interval="200ms"
Dec 05 12:29:26 crc kubenswrapper[4784]: E1205 12:29:26.653285 4784 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.223:6443: connect: connection refused" interval="400ms"
Dec 05 12:29:27 crc kubenswrapper[4784]: E1205 12:29:27.054855 4784 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.223:6443: connect: connection refused" interval="800ms"
Dec 05 12:29:27 crc kubenswrapper[4784]: E1205 12:29:27.855997 4784 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.223:6443: connect: connection refused" interval="1.6s"
Dec 05 12:29:29 crc kubenswrapper[4784]: E1205 12:29:29.457964 4784 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.223:6443: connect: connection refused" interval="3.2s"
Dec 05 12:29:31 crc kubenswrapper[4784]: I1205 12:29:31.003173 4784 status_manager.go:851] "Failed to get status for pod" podUID="6d81f1d0-3e85-443a-a738-2e0d9302d327" pod="openshift-marketplace/redhat-marketplace-hkkm9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-hkkm9\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:31 crc kubenswrapper[4784]: I1205 12:29:31.003809 4784 status_manager.go:851] "Failed to get status for pod" podUID="be2166c8-c56d-46bd-ac93-e6eeb11ecba6" pod="openshift-marketplace/certified-operators-sf2pd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-sf2pd\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:31 crc kubenswrapper[4784]: I1205 12:29:31.004375 4784 status_manager.go:851] "Failed to get status for pod" podUID="f6a64333-7a87-462a-996f-b3ce85e43c8f" pod="openshift-marketplace/community-operators-pd864" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-pd864\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:31 crc kubenswrapper[4784]: I1205 12:29:31.004746 4784 status_manager.go:851] "Failed to get status for pod" podUID="390fc8b7-86b2-4ecc-a41f-1fffd11b1a22" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:31 crc kubenswrapper[4784]: I1205 12:29:31.005117 4784 status_manager.go:851] "Failed to get status for pod" podUID="e350bf27-d60f-4f5f-9bc0-460e997fed0c" pod="openshift-marketplace/marketplace-operator-79b997595-txh4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-txh4x\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:31 crc kubenswrapper[4784]: I1205 12:29:31.005486 4784 status_manager.go:851] "Failed to get status for pod" podUID="626019ff-24ba-4b81-b6ad-ba7c7085fa55" pod="openshift-marketplace/redhat-operators-vlvmb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-vlvmb\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:32 crc kubenswrapper[4784]: I1205 12:29:32.481523 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-qmqnr"
Dec 05 12:29:32 crc kubenswrapper[4784]: I1205 12:29:32.482057 4784 status_manager.go:851] "Failed to get status for pod" podUID="f6a64333-7a87-462a-996f-b3ce85e43c8f" pod="openshift-marketplace/community-operators-pd864" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-pd864\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:32 crc kubenswrapper[4784]: I1205 12:29:32.482340 4784 status_manager.go:851] "Failed to get status for pod" podUID="390fc8b7-86b2-4ecc-a41f-1fffd11b1a22" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:32 crc kubenswrapper[4784]: I1205 12:29:32.482651 4784 status_manager.go:851] "Failed to get status for pod" podUID="59d0659e-87ef-4aad-b969-d841641f2e3e" pod="openshift-image-registry/image-registry-66df7c8f76-qmqnr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/pods/image-registry-66df7c8f76-qmqnr\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:32 crc kubenswrapper[4784]: I1205 12:29:32.482879 4784 status_manager.go:851] "Failed to get status for pod" podUID="e350bf27-d60f-4f5f-9bc0-460e997fed0c" pod="openshift-marketplace/marketplace-operator-79b997595-txh4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-txh4x\": dial tcp 38.102.83.223:6443: connect: connection refused"
Dec 05 12:29:32 crc kubenswrapper[4784]: I1205 12:29:32.483090 4784 status_manager.go:851] "Failed to get status for pod"
podUID="626019ff-24ba-4b81-b6ad-ba7c7085fa55" pod="openshift-marketplace/redhat-operators-vlvmb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-vlvmb\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:32 crc kubenswrapper[4784]: I1205 12:29:32.483319 4784 status_manager.go:851] "Failed to get status for pod" podUID="6d81f1d0-3e85-443a-a738-2e0d9302d327" pod="openshift-marketplace/redhat-marketplace-hkkm9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-hkkm9\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:32 crc kubenswrapper[4784]: I1205 12:29:32.483558 4784 status_manager.go:851] "Failed to get status for pod" podUID="be2166c8-c56d-46bd-ac93-e6eeb11ecba6" pod="openshift-marketplace/certified-operators-sf2pd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-sf2pd\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:32 crc kubenswrapper[4784]: E1205 12:29:32.549605 4784 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 38.102.83.223:6443: connect: connection refused" pod="openshift-image-registry/image-registry-66df7c8f76-qmqnr" volumeName="registry-storage" Dec 05 12:29:32 crc kubenswrapper[4784]: E1205 12:29:32.658441 4784 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.223:6443: connect: connection refused" interval="6.4s" Dec 05 12:29:34 crc kubenswrapper[4784]: I1205 12:29:34.730215 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 05 12:29:34 crc kubenswrapper[4784]: I1205 12:29:34.730830 4784 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219" exitCode=1 Dec 05 12:29:34 crc kubenswrapper[4784]: I1205 12:29:34.730866 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219"} Dec 05 12:29:34 crc kubenswrapper[4784]: I1205 12:29:34.731443 4784 scope.go:117] "RemoveContainer" containerID="26b8b6d1b4a134918a525e2abec165a0c72265eab6c2a1e537094d1696a2e219" Dec 05 12:29:34 crc kubenswrapper[4784]: I1205 12:29:34.731837 4784 status_manager.go:851] "Failed to get status for pod" podUID="6d81f1d0-3e85-443a-a738-2e0d9302d327" pod="openshift-marketplace/redhat-marketplace-hkkm9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-hkkm9\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:34 crc kubenswrapper[4784]: I1205 12:29:34.732120 4784 status_manager.go:851] "Failed to get status for pod" podUID="be2166c8-c56d-46bd-ac93-e6eeb11ecba6" 
pod="openshift-marketplace/certified-operators-sf2pd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-sf2pd\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:34 crc kubenswrapper[4784]: I1205 12:29:34.732373 4784 status_manager.go:851] "Failed to get status for pod" podUID="f6a64333-7a87-462a-996f-b3ce85e43c8f" pod="openshift-marketplace/community-operators-pd864" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-pd864\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:34 crc kubenswrapper[4784]: I1205 12:29:34.732630 4784 status_manager.go:851] "Failed to get status for pod" podUID="390fc8b7-86b2-4ecc-a41f-1fffd11b1a22" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:34 crc kubenswrapper[4784]: I1205 12:29:34.732995 4784 status_manager.go:851] "Failed to get status for pod" podUID="59d0659e-87ef-4aad-b969-d841641f2e3e" pod="openshift-image-registry/image-registry-66df7c8f76-qmqnr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/pods/image-registry-66df7c8f76-qmqnr\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:34 crc kubenswrapper[4784]: I1205 12:29:34.733448 4784 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:34 crc kubenswrapper[4784]: I1205 12:29:34.733776 4784 status_manager.go:851] "Failed to get status for pod" podUID="e350bf27-d60f-4f5f-9bc0-460e997fed0c" pod="openshift-marketplace/marketplace-operator-79b997595-txh4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-txh4x\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:34 crc kubenswrapper[4784]: I1205 12:29:34.734236 4784 status_manager.go:851] "Failed to get status for pod" podUID="626019ff-24ba-4b81-b6ad-ba7c7085fa55" pod="openshift-marketplace/redhat-operators-vlvmb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-vlvmb\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:34 crc kubenswrapper[4784]: I1205 12:29:34.998225 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-kvfdl" Dec 05 12:29:34 crc kubenswrapper[4784]: I1205 12:29:34.999305 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-kvfdl" Dec 05 12:29:35 crc kubenswrapper[4784]: I1205 12:29:35.078546 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 12:29:35 crc kubenswrapper[4784]: E1205 12:29:35.600345 4784 log.go:32] "RunPodSandbox from runtime service failed" err=< Dec 05 12:29:35 crc kubenswrapper[4784]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-kvfdl_openshift-marketplace_4825dcfb-cf17-4a0d-b4f2-4f46c87beccb_0(bce6ebfe6b07829988e87d7498fee5287b38435f84efb94f719c9f94372d8424): error adding pod openshift-marketplace_marketplace-operator-79b997595-kvfdl to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"bce6ebfe6b07829988e87d7498fee5287b38435f84efb94f719c9f94372d8424" Netns:"/var/run/netns/76e91f69-7c8b-4132-864d-3bac667fe45e" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-kvfdl;K8S_POD_INFRA_CONTAINER_ID=bce6ebfe6b07829988e87d7498fee5287b38435f84efb94f719c9f94372d8424;K8S_POD_UID=4825dcfb-cf17-4a0d-b4f2-4f46c87beccb" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-kvfdl] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-kvfdl/4825dcfb-cf17-4a0d-b4f2-4f46c87beccb]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-kvfdl in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-kvfdl in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-kvfdl?timeout=1m0s": dial tcp 38.102.83.223:6443: connect: connection refused Dec 05 12:29:35 crc kubenswrapper[4784]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 05 12:29:35 crc kubenswrapper[4784]: > Dec 05 12:29:35 crc kubenswrapper[4784]: E1205 12:29:35.600717 4784 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err=< Dec 05 12:29:35 crc kubenswrapper[4784]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-kvfdl_openshift-marketplace_4825dcfb-cf17-4a0d-b4f2-4f46c87beccb_0(bce6ebfe6b07829988e87d7498fee5287b38435f84efb94f719c9f94372d8424): error adding pod openshift-marketplace_marketplace-operator-79b997595-kvfdl to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"bce6ebfe6b07829988e87d7498fee5287b38435f84efb94f719c9f94372d8424" Netns:"/var/run/netns/76e91f69-7c8b-4132-864d-3bac667fe45e" IfName:"eth0" 
Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-kvfdl;K8S_POD_INFRA_CONTAINER_ID=bce6ebfe6b07829988e87d7498fee5287b38435f84efb94f719c9f94372d8424;K8S_POD_UID=4825dcfb-cf17-4a0d-b4f2-4f46c87beccb" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-kvfdl] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-kvfdl/4825dcfb-cf17-4a0d-b4f2-4f46c87beccb]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-kvfdl in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-kvfdl in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-kvfdl?timeout=1m0s": dial tcp 38.102.83.223:6443: connect: connection refused Dec 05 12:29:35 crc kubenswrapper[4784]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 05 12:29:35 crc kubenswrapper[4784]: > pod="openshift-marketplace/marketplace-operator-79b997595-kvfdl" Dec 05 12:29:35 crc kubenswrapper[4784]: E1205 12:29:35.600740 4784 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err=< Dec 05 12:29:35 crc kubenswrapper[4784]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-kvfdl_openshift-marketplace_4825dcfb-cf17-4a0d-b4f2-4f46c87beccb_0(bce6ebfe6b07829988e87d7498fee5287b38435f84efb94f719c9f94372d8424): error adding pod openshift-marketplace_marketplace-operator-79b997595-kvfdl to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"bce6ebfe6b07829988e87d7498fee5287b38435f84efb94f719c9f94372d8424" Netns:"/var/run/netns/76e91f69-7c8b-4132-864d-3bac667fe45e" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-kvfdl;K8S_POD_INFRA_CONTAINER_ID=bce6ebfe6b07829988e87d7498fee5287b38435f84efb94f719c9f94372d8424;K8S_POD_UID=4825dcfb-cf17-4a0d-b4f2-4f46c87beccb" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-kvfdl] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-kvfdl/4825dcfb-cf17-4a0d-b4f2-4f46c87beccb]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-kvfdl in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-kvfdl in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-kvfdl?timeout=1m0s": dial tcp 38.102.83.223:6443: connect: connection refused Dec 05 12:29:35 crc kubenswrapper[4784]: ': StdinData: 
{"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 05 12:29:35 crc kubenswrapper[4784]: > pod="openshift-marketplace/marketplace-operator-79b997595-kvfdl" Dec 05 12:29:35 crc kubenswrapper[4784]: E1205 12:29:35.600805 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"marketplace-operator-79b997595-kvfdl_openshift-marketplace(4825dcfb-cf17-4a0d-b4f2-4f46c87beccb)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"marketplace-operator-79b997595-kvfdl_openshift-marketplace(4825dcfb-cf17-4a0d-b4f2-4f46c87beccb)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-kvfdl_openshift-marketplace_4825dcfb-cf17-4a0d-b4f2-4f46c87beccb_0(bce6ebfe6b07829988e87d7498fee5287b38435f84efb94f719c9f94372d8424): error adding pod openshift-marketplace_marketplace-operator-79b997595-kvfdl to CNI network \\\"multus-cni-network\\\": plugin type=\\\"multus-shim\\\" name=\\\"multus-cni-network\\\" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:\\\"bce6ebfe6b07829988e87d7498fee5287b38435f84efb94f719c9f94372d8424\\\" Netns:\\\"/var/run/netns/76e91f69-7c8b-4132-864d-3bac667fe45e\\\" IfName:\\\"eth0\\\" Args:\\\"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-kvfdl;K8S_POD_INFRA_CONTAINER_ID=bce6ebfe6b07829988e87d7498fee5287b38435f84efb94f719c9f94372d8424;K8S_POD_UID=4825dcfb-cf17-4a0d-b4f2-4f46c87beccb\\\" Path:\\\"\\\" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-kvfdl] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-kvfdl/4825dcfb-cf17-4a0d-b4f2-4f46c87beccb]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-kvfdl in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-kvfdl in out of cluster comm: status update failed for pod /: Get \\\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-kvfdl?timeout=1m0s\\\": dial tcp 38.102.83.223:6443: connect: connection refused\\n': StdinData: {\\\"binDir\\\":\\\"/var/lib/cni/bin\\\",\\\"clusterNetwork\\\":\\\"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf\\\",\\\"cniVersion\\\":\\\"0.3.1\\\",\\\"daemonSocketDir\\\":\\\"/run/multus/socket\\\",\\\"globalNamespaces\\\":\\\"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv\\\",\\\"logLevel\\\":\\\"verbose\\\",\\\"logToStderr\\\":true,\\\"name\\\":\\\"multus-cni-network\\\",\\\"namespaceIsolation\\\":true,\\\"type\\\":\\\"multus-shim\\\"}\"" pod="openshift-marketplace/marketplace-operator-79b997595-kvfdl" podUID="4825dcfb-cf17-4a0d-b4f2-4f46c87beccb" Dec 05 12:29:35 crc kubenswrapper[4784]: I1205 12:29:35.745760 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 05 12:29:35 crc kubenswrapper[4784]: I1205 12:29:35.745832 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c71b73da985f52a40e156b6b25cb4b041c7f490f27326fbc983420d19ff14529"} Dec 05 12:29:35 crc kubenswrapper[4784]: I1205 12:29:35.747075 4784 status_manager.go:851] "Failed to get status for pod" podUID="f6a64333-7a87-462a-996f-b3ce85e43c8f" pod="openshift-marketplace/community-operators-pd864" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-pd864\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:35 crc kubenswrapper[4784]: I1205 12:29:35.747517 4784 status_manager.go:851] "Failed to get status for pod" podUID="390fc8b7-86b2-4ecc-a41f-1fffd11b1a22" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:35 crc kubenswrapper[4784]: I1205 12:29:35.747943 4784 status_manager.go:851] "Failed to get status for pod" podUID="59d0659e-87ef-4aad-b969-d841641f2e3e" pod="openshift-image-registry/image-registry-66df7c8f76-qmqnr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/pods/image-registry-66df7c8f76-qmqnr\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:35 crc kubenswrapper[4784]: I1205 12:29:35.748436 4784 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:35 crc kubenswrapper[4784]: I1205 12:29:35.748752 4784 status_manager.go:851] "Failed to get status for pod" podUID="626019ff-24ba-4b81-b6ad-ba7c7085fa55" pod="openshift-marketplace/redhat-operators-vlvmb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-vlvmb\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:35 crc kubenswrapper[4784]: I1205 12:29:35.749080 4784 status_manager.go:851] "Failed to get status for pod" podUID="e350bf27-d60f-4f5f-9bc0-460e997fed0c" pod="openshift-marketplace/marketplace-operator-79b997595-txh4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-txh4x\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:35 crc kubenswrapper[4784]: I1205 12:29:35.749398 4784 status_manager.go:851] "Failed to get status for pod" podUID="6d81f1d0-3e85-443a-a738-2e0d9302d327" pod="openshift-marketplace/redhat-marketplace-hkkm9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-hkkm9\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:35 crc kubenswrapper[4784]: I1205 12:29:35.749759 4784 status_manager.go:851] "Failed to get status for pod" podUID="be2166c8-c56d-46bd-ac93-e6eeb11ecba6" pod="openshift-marketplace/certified-operators-sf2pd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-sf2pd\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:35 crc kubenswrapper[4784]: E1205 12:29:35.834631 4784 event.go:368] "Unable to write event (may 
retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/events\": dial tcp 38.102.83.223:6443: connect: connection refused" event=< Dec 05 12:29:35 crc kubenswrapper[4784]: &Event{ObjectMeta:{marketplace-operator-79b997595-kvfdl.187e518b52279e9e openshift-marketplace 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-marketplace,Name:marketplace-operator-79b997595-kvfdl,UID:4825dcfb-cf17-4a0d-b4f2-4f46c87beccb,APIVersion:v1,ResourceVersion:29402,FieldPath:,},Reason:FailedCreatePodSandBox,Message:Failed to create pod sandbox: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-kvfdl_openshift-marketplace_4825dcfb-cf17-4a0d-b4f2-4f46c87beccb_0(a7b212dc0d75a287b961f7a0ff821d5ea7980a09993505a9d4a341279dc7dc10): error adding pod openshift-marketplace_marketplace-operator-79b997595-kvfdl to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"a7b212dc0d75a287b961f7a0ff821d5ea7980a09993505a9d4a341279dc7dc10" Netns:"/var/run/netns/4ffd8df7-96ec-40e6-97cb-cc796a382b28" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-kvfdl;K8S_POD_INFRA_CONTAINER_ID=a7b212dc0d75a287b961f7a0ff821d5ea7980a09993505a9d4a341279dc7dc10;K8S_POD_UID=4825dcfb-cf17-4a0d-b4f2-4f46c87beccb" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-kvfdl] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-kvfdl/4825dcfb-cf17-4a0d-b4f2-4f46c87beccb]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-kvfdl in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-kvfdl in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-kvfdl?timeout=1m0s": dial tcp 38.102.83.223:6443: connect: connection refused Dec 05 12:29:35 crc kubenswrapper[4784]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"},Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 12:29:22.796445342 +0000 UTC m=+242.216512157,LastTimestamp:2025-12-05 12:29:22.796445342 +0000 UTC m=+242.216512157,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,} Dec 05 12:29:35 crc kubenswrapper[4784]: > Dec 05 12:29:36 crc kubenswrapper[4784]: I1205 12:29:36.540426 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 12:29:36 crc kubenswrapper[4784]: I1205 12:29:36.998412 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:29:36 crc kubenswrapper[4784]: I1205 12:29:36.999460 4784 status_manager.go:851] "Failed to get status for pod" podUID="f6a64333-7a87-462a-996f-b3ce85e43c8f" pod="openshift-marketplace/community-operators-pd864" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-pd864\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:36 crc kubenswrapper[4784]: I1205 12:29:36.999826 4784 status_manager.go:851] "Failed to get status for pod" podUID="390fc8b7-86b2-4ecc-a41f-1fffd11b1a22" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:37 crc kubenswrapper[4784]: I1205 12:29:37.000217 4784 status_manager.go:851] "Failed to get status for pod" podUID="59d0659e-87ef-4aad-b969-d841641f2e3e" pod="openshift-image-registry/image-registry-66df7c8f76-qmqnr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/pods/image-registry-66df7c8f76-qmqnr\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:37 crc kubenswrapper[4784]: I1205 12:29:37.000744 4784 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:37 crc kubenswrapper[4784]: I1205 12:29:37.001082 4784 status_manager.go:851] "Failed to get status for pod" podUID="626019ff-24ba-4b81-b6ad-ba7c7085fa55" pod="openshift-marketplace/redhat-operators-vlvmb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-vlvmb\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:37 crc kubenswrapper[4784]: I1205 12:29:37.001343 4784 status_manager.go:851] "Failed to get status for pod" podUID="e350bf27-d60f-4f5f-9bc0-460e997fed0c" pod="openshift-marketplace/marketplace-operator-79b997595-txh4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-txh4x\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:37 crc kubenswrapper[4784]: I1205 12:29:37.001622 4784 status_manager.go:851] "Failed to get status for pod" podUID="6d81f1d0-3e85-443a-a738-2e0d9302d327" pod="openshift-marketplace/redhat-marketplace-hkkm9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-hkkm9\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:37 crc kubenswrapper[4784]: I1205 12:29:37.001845 4784 status_manager.go:851] "Failed to get status for pod" podUID="be2166c8-c56d-46bd-ac93-e6eeb11ecba6" pod="openshift-marketplace/certified-operators-sf2pd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-sf2pd\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:37 crc kubenswrapper[4784]: I1205 12:29:37.020314 4784 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="47a0de2a-77d6-40f1-abe1-767b65e73b74" Dec 05 12:29:37 crc kubenswrapper[4784]: I1205 
12:29:37.020354 4784 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="47a0de2a-77d6-40f1-abe1-767b65e73b74" Dec 05 12:29:37 crc kubenswrapper[4784]: E1205 12:29:37.020822 4784 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.223:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:29:37 crc kubenswrapper[4784]: I1205 12:29:37.021272 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:29:37 crc kubenswrapper[4784]: W1205 12:29:37.045725 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-b15b320969ec2b67a23903a2f91bad44e88f1632799ca169bbd96cb293b901e0 WatchSource:0}: Error finding container b15b320969ec2b67a23903a2f91bad44e88f1632799ca169bbd96cb293b901e0: Status 404 returned error can't find the container with id b15b320969ec2b67a23903a2f91bad44e88f1632799ca169bbd96cb293b901e0 Dec 05 12:29:37 crc kubenswrapper[4784]: I1205 12:29:37.755779 4784 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="448f18e2b06a91c70cb9d3cda21b9821bb09fbab4cf0bb776c00bf5bf7508a62" exitCode=0 Dec 05 12:29:37 crc kubenswrapper[4784]: I1205 12:29:37.755890 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"448f18e2b06a91c70cb9d3cda21b9821bb09fbab4cf0bb776c00bf5bf7508a62"} Dec 05 12:29:37 crc kubenswrapper[4784]: I1205 12:29:37.756112 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"b15b320969ec2b67a23903a2f91bad44e88f1632799ca169bbd96cb293b901e0"} Dec 05 12:29:37 crc kubenswrapper[4784]: I1205 12:29:37.756559 4784 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="47a0de2a-77d6-40f1-abe1-767b65e73b74" Dec 05 12:29:37 crc kubenswrapper[4784]: I1205 12:29:37.756587 4784 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="47a0de2a-77d6-40f1-abe1-767b65e73b74" Dec 05 12:29:37 crc kubenswrapper[4784]: E1205 12:29:37.757021 4784 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.223:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:29:37 crc kubenswrapper[4784]: I1205 12:29:37.757063 4784 status_manager.go:851] "Failed to get status for pod" podUID="f6a64333-7a87-462a-996f-b3ce85e43c8f" pod="openshift-marketplace/community-operators-pd864" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-pd864\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:37 crc kubenswrapper[4784]: I1205 12:29:37.757546 4784 status_manager.go:851] "Failed to get status for pod" podUID="390fc8b7-86b2-4ecc-a41f-1fffd11b1a22" pod="openshift-kube-apiserver/installer-9-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:37 crc kubenswrapper[4784]: I1205 12:29:37.757782 4784 status_manager.go:851] "Failed to get status for pod" podUID="59d0659e-87ef-4aad-b969-d841641f2e3e" pod="openshift-image-registry/image-registry-66df7c8f76-qmqnr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/pods/image-registry-66df7c8f76-qmqnr\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:37 crc kubenswrapper[4784]: I1205 12:29:37.758044 4784 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:37 crc kubenswrapper[4784]: I1205 12:29:37.758266 4784 status_manager.go:851] "Failed to get status for pod" podUID="e350bf27-d60f-4f5f-9bc0-460e997fed0c" pod="openshift-marketplace/marketplace-operator-79b997595-txh4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-txh4x\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:37 crc kubenswrapper[4784]: I1205 12:29:37.758493 4784 status_manager.go:851] "Failed to get status for pod" podUID="626019ff-24ba-4b81-b6ad-ba7c7085fa55" pod="openshift-marketplace/redhat-operators-vlvmb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-vlvmb\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:37 crc kubenswrapper[4784]: I1205 12:29:37.758773 4784 status_manager.go:851] "Failed to get status for pod" podUID="6d81f1d0-3e85-443a-a738-2e0d9302d327" pod="openshift-marketplace/redhat-marketplace-hkkm9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-hkkm9\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:37 crc kubenswrapper[4784]: I1205 12:29:37.759057 4784 status_manager.go:851] "Failed to get status for pod" podUID="be2166c8-c56d-46bd-ac93-e6eeb11ecba6" pod="openshift-marketplace/certified-operators-sf2pd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-sf2pd\": dial tcp 38.102.83.223:6443: connect: connection refused" Dec 05 12:29:38 crc kubenswrapper[4784]: I1205 12:29:38.768649 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"426864b463ad13dca402ffde7553ac0a6d02813fdcfa62d0991126cffe5b417f"} Dec 05 12:29:38 crc kubenswrapper[4784]: I1205 12:29:38.768968 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"ed503f74c145c215283762b2ece96270d7283a2adb7e3e5a5dc239b30c90b9c0"} Dec 05 12:29:38 crc kubenswrapper[4784]: I1205 12:29:38.768981 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"462a6c7e980d94f5d387f0e1e0036ff64fa2b7540068a9491be1bb916a1827b3"} 
Dec 05 12:29:38 crc kubenswrapper[4784]: I1205 12:29:38.768990 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"d072774a0a1fe99217277caf47ca9a6b46c66dd41d596653083775d24e66a6cd"} Dec 05 12:29:39 crc kubenswrapper[4784]: I1205 12:29:39.777123 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"51c00af5a9d389c5128231301aca74feb7900017a2d773252c6d28da6fc79350"} Dec 05 12:29:39 crc kubenswrapper[4784]: I1205 12:29:39.777490 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:29:39 crc kubenswrapper[4784]: I1205 12:29:39.777515 4784 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="47a0de2a-77d6-40f1-abe1-767b65e73b74" Dec 05 12:29:39 crc kubenswrapper[4784]: I1205 12:29:39.777548 4784 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="47a0de2a-77d6-40f1-abe1-767b65e73b74" Dec 05 12:29:42 crc kubenswrapper[4784]: I1205 12:29:42.021623 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:29:42 crc kubenswrapper[4784]: I1205 12:29:42.021669 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:29:42 crc kubenswrapper[4784]: I1205 12:29:42.026851 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:29:43 crc kubenswrapper[4784]: I1205 12:29:43.272704 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-kld95" podUID="da405e0d-550f-42a2-8b4e-a387eabb8e0a" containerName="oauth-openshift" containerID="cri-o://b9970169ea79c86d085d07d939b108567ec2b85e42b6c08645c31f4c1fbfa6c9" gracePeriod=15 Dec 05 12:29:43 crc kubenswrapper[4784]: I1205 12:29:43.797258 4784 generic.go:334] "Generic (PLEG): container finished" podID="da405e0d-550f-42a2-8b4e-a387eabb8e0a" containerID="b9970169ea79c86d085d07d939b108567ec2b85e42b6c08645c31f4c1fbfa6c9" exitCode=0 Dec 05 12:29:43 crc kubenswrapper[4784]: I1205 12:29:43.797426 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-kld95" event={"ID":"da405e0d-550f-42a2-8b4e-a387eabb8e0a","Type":"ContainerDied","Data":"b9970169ea79c86d085d07d939b108567ec2b85e42b6c08645c31f4c1fbfa6c9"} Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.139174 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.234288 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-system-service-ca\") pod \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.234340 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-system-router-certs\") pod \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.234364 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-user-template-login\") pod \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.234382 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-user-idp-0-file-data\") pod \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.234436 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-system-session\") pod \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.234467 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-system-ocp-branding-template\") pod \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.234483 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-user-template-error\") pod \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.234507 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-user-template-provider-selection\") pod \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.234543 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-system-serving-cert\") pod \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\" (UID: 
\"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.234562 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/da405e0d-550f-42a2-8b4e-a387eabb8e0a-audit-dir\") pod \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.234597 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-system-cliconfig\") pod \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.234620 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-system-trusted-ca-bundle\") pod \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.234632 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/da405e0d-550f-42a2-8b4e-a387eabb8e0a-audit-policies\") pod \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.234651 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k79jm\" (UniqueName: \"kubernetes.io/projected/da405e0d-550f-42a2-8b4e-a387eabb8e0a-kube-api-access-k79jm\") pod \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\" (UID: \"da405e0d-550f-42a2-8b4e-a387eabb8e0a\") " Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.234683 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/da405e0d-550f-42a2-8b4e-a387eabb8e0a-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "da405e0d-550f-42a2-8b4e-a387eabb8e0a" (UID: "da405e0d-550f-42a2-8b4e-a387eabb8e0a"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.235535 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "da405e0d-550f-42a2-8b4e-a387eabb8e0a" (UID: "da405e0d-550f-42a2-8b4e-a387eabb8e0a"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.235830 4784 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/da405e0d-550f-42a2-8b4e-a387eabb8e0a-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.235845 4784 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.235764 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "da405e0d-550f-42a2-8b4e-a387eabb8e0a" (UID: "da405e0d-550f-42a2-8b4e-a387eabb8e0a"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.235926 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/da405e0d-550f-42a2-8b4e-a387eabb8e0a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "da405e0d-550f-42a2-8b4e-a387eabb8e0a" (UID: "da405e0d-550f-42a2-8b4e-a387eabb8e0a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.236298 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "da405e0d-550f-42a2-8b4e-a387eabb8e0a" (UID: "da405e0d-550f-42a2-8b4e-a387eabb8e0a"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.241204 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "da405e0d-550f-42a2-8b4e-a387eabb8e0a" (UID: "da405e0d-550f-42a2-8b4e-a387eabb8e0a"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.241672 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da405e0d-550f-42a2-8b4e-a387eabb8e0a-kube-api-access-k79jm" (OuterVolumeSpecName: "kube-api-access-k79jm") pod "da405e0d-550f-42a2-8b4e-a387eabb8e0a" (UID: "da405e0d-550f-42a2-8b4e-a387eabb8e0a"). InnerVolumeSpecName "kube-api-access-k79jm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.242587 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "da405e0d-550f-42a2-8b4e-a387eabb8e0a" (UID: "da405e0d-550f-42a2-8b4e-a387eabb8e0a"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.243538 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "da405e0d-550f-42a2-8b4e-a387eabb8e0a" (UID: "da405e0d-550f-42a2-8b4e-a387eabb8e0a"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.246388 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "da405e0d-550f-42a2-8b4e-a387eabb8e0a" (UID: "da405e0d-550f-42a2-8b4e-a387eabb8e0a"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.246655 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "da405e0d-550f-42a2-8b4e-a387eabb8e0a" (UID: "da405e0d-550f-42a2-8b4e-a387eabb8e0a"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.246929 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "da405e0d-550f-42a2-8b4e-a387eabb8e0a" (UID: "da405e0d-550f-42a2-8b4e-a387eabb8e0a"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.247373 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "da405e0d-550f-42a2-8b4e-a387eabb8e0a" (UID: "da405e0d-550f-42a2-8b4e-a387eabb8e0a"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.247624 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "da405e0d-550f-42a2-8b4e-a387eabb8e0a" (UID: "da405e0d-550f-42a2-8b4e-a387eabb8e0a"). InnerVolumeSpecName "v4-0-config-system-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.336474 4784 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.336510 4784 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.336521 4784 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.336532 4784 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.336543 4784 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.336553 4784 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/da405e0d-550f-42a2-8b4e-a387eabb8e0a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.336564 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k79jm\" (UniqueName: \"kubernetes.io/projected/da405e0d-550f-42a2-8b4e-a387eabb8e0a-kube-api-access-k79jm\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.336573 4784 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.336581 4784 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.336589 4784 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.336597 4784 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.336606 4784 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: 
\"kubernetes.io/secret/da405e0d-550f-42a2-8b4e-a387eabb8e0a-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.788447 4784 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.805501 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-kld95" event={"ID":"da405e0d-550f-42a2-8b4e-a387eabb8e0a","Type":"ContainerDied","Data":"ba2b8cb3abcff27c986b848d1c561f362fac0c53b509b057c437687c31c0f353"} Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.805565 4784 scope.go:117] "RemoveContainer" containerID="b9970169ea79c86d085d07d939b108567ec2b85e42b6c08645c31f4c1fbfa6c9" Dec 05 12:29:44 crc kubenswrapper[4784]: I1205 12:29:44.805591 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-kld95" Dec 05 12:29:45 crc kubenswrapper[4784]: I1205 12:29:45.042147 4784 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="7ffc8bf5-e794-49fc-a221-4957f6af6638" Dec 05 12:29:45 crc kubenswrapper[4784]: I1205 12:29:45.078581 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 12:29:45 crc kubenswrapper[4784]: I1205 12:29:45.082356 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 12:29:45 crc kubenswrapper[4784]: E1205 12:29:45.219810 4784 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"v4-0-config-user-template-provider-selection\": Failed to watch *v1.Secret: unknown (get secrets)" logger="UnhandledError" Dec 05 12:29:45 crc kubenswrapper[4784]: E1205 12:29:45.268522 4784 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: unknown (get configmaps)" logger="UnhandledError" Dec 05 12:29:45 crc kubenswrapper[4784]: I1205 12:29:45.811221 4784 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="47a0de2a-77d6-40f1-abe1-767b65e73b74" Dec 05 12:29:45 crc kubenswrapper[4784]: I1205 12:29:45.811253 4784 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="47a0de2a-77d6-40f1-abe1-767b65e73b74" Dec 05 12:29:45 crc kubenswrapper[4784]: I1205 12:29:45.814949 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 12:29:45 crc kubenswrapper[4784]: I1205 12:29:45.815631 4784 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="7ffc8bf5-e794-49fc-a221-4957f6af6638" Dec 05 12:29:49 crc kubenswrapper[4784]: I1205 12:29:49.998078 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-kvfdl" Dec 05 12:29:49 crc kubenswrapper[4784]: I1205 12:29:49.999014 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-kvfdl" Dec 05 12:29:50 crc kubenswrapper[4784]: I1205 12:29:50.836991 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-kvfdl" event={"ID":"4825dcfb-cf17-4a0d-b4f2-4f46c87beccb","Type":"ContainerStarted","Data":"b920cf5e63ff8740a699287ca8f455b82b8a2b92c45d63e21af09b23dadae0aa"} Dec 05 12:29:50 crc kubenswrapper[4784]: I1205 12:29:50.837631 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-kvfdl" event={"ID":"4825dcfb-cf17-4a0d-b4f2-4f46c87beccb","Type":"ContainerStarted","Data":"98a7dfa60a1e007a9409d27bd0963ed8285bdb92abe2de0cca3aee28d2c0ca90"} Dec 05 12:29:50 crc kubenswrapper[4784]: I1205 12:29:50.837998 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-kvfdl" Dec 05 12:29:50 crc kubenswrapper[4784]: I1205 12:29:50.839124 4784 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-kvfdl container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.57:8080/healthz\": dial tcp 10.217.0.57:8080: connect: connection refused" start-of-body= Dec 05 12:29:50 crc kubenswrapper[4784]: I1205 12:29:50.839181 4784 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-kvfdl" podUID="4825dcfb-cf17-4a0d-b4f2-4f46c87beccb" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.57:8080/healthz\": dial tcp 10.217.0.57:8080: connect: connection refused" Dec 05 12:29:51 crc kubenswrapper[4784]: I1205 12:29:51.849558 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-kvfdl_4825dcfb-cf17-4a0d-b4f2-4f46c87beccb/marketplace-operator/0.log" Dec 05 12:29:51 crc kubenswrapper[4784]: I1205 12:29:51.849615 4784 generic.go:334] "Generic (PLEG): container finished" podID="4825dcfb-cf17-4a0d-b4f2-4f46c87beccb" containerID="b920cf5e63ff8740a699287ca8f455b82b8a2b92c45d63e21af09b23dadae0aa" exitCode=1 Dec 05 12:29:51 crc kubenswrapper[4784]: I1205 12:29:51.849649 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-kvfdl" event={"ID":"4825dcfb-cf17-4a0d-b4f2-4f46c87beccb","Type":"ContainerDied","Data":"b920cf5e63ff8740a699287ca8f455b82b8a2b92c45d63e21af09b23dadae0aa"} Dec 05 12:29:51 crc kubenswrapper[4784]: I1205 12:29:51.850036 4784 scope.go:117] "RemoveContainer" containerID="b920cf5e63ff8740a699287ca8f455b82b8a2b92c45d63e21af09b23dadae0aa" Dec 05 12:29:52 crc kubenswrapper[4784]: I1205 12:29:52.278659 4784 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-marketplace/marketplace-operator-79b997595-kvfdl" Dec 05 12:29:52 crc kubenswrapper[4784]: I1205 12:29:52.856056 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-kvfdl_4825dcfb-cf17-4a0d-b4f2-4f46c87beccb/marketplace-operator/1.log" Dec 05 12:29:52 crc kubenswrapper[4784]: I1205 12:29:52.856995 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-kvfdl_4825dcfb-cf17-4a0d-b4f2-4f46c87beccb/marketplace-operator/0.log" Dec 05 12:29:52 crc kubenswrapper[4784]: I1205 12:29:52.857071 4784 generic.go:334] "Generic (PLEG): container finished" 
podID="4825dcfb-cf17-4a0d-b4f2-4f46c87beccb" containerID="54e7aa2db90313fd72595a69a8a97b25ec8d6562efa67cf159b44c13b7b72643" exitCode=1 Dec 05 12:29:52 crc kubenswrapper[4784]: I1205 12:29:52.857112 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-kvfdl" event={"ID":"4825dcfb-cf17-4a0d-b4f2-4f46c87beccb","Type":"ContainerDied","Data":"54e7aa2db90313fd72595a69a8a97b25ec8d6562efa67cf159b44c13b7b72643"} Dec 05 12:29:52 crc kubenswrapper[4784]: I1205 12:29:52.857151 4784 scope.go:117] "RemoveContainer" containerID="b920cf5e63ff8740a699287ca8f455b82b8a2b92c45d63e21af09b23dadae0aa" Dec 05 12:29:52 crc kubenswrapper[4784]: I1205 12:29:52.857740 4784 scope.go:117] "RemoveContainer" containerID="54e7aa2db90313fd72595a69a8a97b25ec8d6562efa67cf159b44c13b7b72643" Dec 05 12:29:52 crc kubenswrapper[4784]: E1205 12:29:52.858004 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 10s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-kvfdl_openshift-marketplace(4825dcfb-cf17-4a0d-b4f2-4f46c87beccb)\"" pod="openshift-marketplace/marketplace-operator-79b997595-kvfdl" podUID="4825dcfb-cf17-4a0d-b4f2-4f46c87beccb" Dec 05 12:29:53 crc kubenswrapper[4784]: I1205 12:29:53.864979 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-kvfdl_4825dcfb-cf17-4a0d-b4f2-4f46c87beccb/marketplace-operator/1.log" Dec 05 12:29:53 crc kubenswrapper[4784]: I1205 12:29:53.866213 4784 scope.go:117] "RemoveContainer" containerID="54e7aa2db90313fd72595a69a8a97b25ec8d6562efa67cf159b44c13b7b72643" Dec 05 12:29:53 crc kubenswrapper[4784]: E1205 12:29:53.866467 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 10s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-kvfdl_openshift-marketplace(4825dcfb-cf17-4a0d-b4f2-4f46c87beccb)\"" pod="openshift-marketplace/marketplace-operator-79b997595-kvfdl" podUID="4825dcfb-cf17-4a0d-b4f2-4f46c87beccb" Dec 05 12:29:54 crc kubenswrapper[4784]: I1205 12:29:54.444454 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 05 12:29:54 crc kubenswrapper[4784]: I1205 12:29:54.468294 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 05 12:29:54 crc kubenswrapper[4784]: I1205 12:29:54.912103 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 05 12:29:55 crc kubenswrapper[4784]: I1205 12:29:55.211839 4784 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 05 12:29:55 crc kubenswrapper[4784]: I1205 12:29:55.504802 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 05 12:29:55 crc kubenswrapper[4784]: I1205 12:29:55.676172 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 05 12:29:55 crc kubenswrapper[4784]: I1205 12:29:55.977686 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 05 12:29:55 crc 
kubenswrapper[4784]: I1205 12:29:55.984114 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 05 12:29:55 crc kubenswrapper[4784]: I1205 12:29:55.989204 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 05 12:29:56 crc kubenswrapper[4784]: I1205 12:29:56.022499 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 05 12:29:56 crc kubenswrapper[4784]: I1205 12:29:56.151330 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 05 12:29:56 crc kubenswrapper[4784]: I1205 12:29:56.199680 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 05 12:29:56 crc kubenswrapper[4784]: I1205 12:29:56.364682 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 05 12:29:56 crc kubenswrapper[4784]: I1205 12:29:56.519097 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 05 12:29:56 crc kubenswrapper[4784]: I1205 12:29:56.573053 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 05 12:29:56 crc kubenswrapper[4784]: I1205 12:29:56.617640 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 05 12:29:56 crc kubenswrapper[4784]: I1205 12:29:56.625544 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 05 12:29:56 crc kubenswrapper[4784]: I1205 12:29:56.694535 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 05 12:29:56 crc kubenswrapper[4784]: I1205 12:29:56.781071 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 05 12:29:56 crc kubenswrapper[4784]: I1205 12:29:56.835954 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 05 12:29:56 crc kubenswrapper[4784]: I1205 12:29:56.905222 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 05 12:29:57 crc kubenswrapper[4784]: I1205 12:29:57.053924 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 05 12:29:57 crc kubenswrapper[4784]: I1205 12:29:57.068249 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 05 12:29:57 crc kubenswrapper[4784]: I1205 12:29:57.326141 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 05 12:29:57 crc kubenswrapper[4784]: I1205 12:29:57.435155 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 05 12:29:57 crc kubenswrapper[4784]: I1205 12:29:57.436778 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 05 12:29:57 crc 
kubenswrapper[4784]: I1205 12:29:57.463408 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 05 12:29:57 crc kubenswrapper[4784]: I1205 12:29:57.522511 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 05 12:29:57 crc kubenswrapper[4784]: I1205 12:29:57.601922 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 05 12:29:57 crc kubenswrapper[4784]: I1205 12:29:57.868066 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 05 12:29:57 crc kubenswrapper[4784]: I1205 12:29:57.992278 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 05 12:29:57 crc kubenswrapper[4784]: I1205 12:29:57.997007 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 05 12:29:58 crc kubenswrapper[4784]: I1205 12:29:58.007087 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 05 12:29:58 crc kubenswrapper[4784]: I1205 12:29:58.024887 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 05 12:29:58 crc kubenswrapper[4784]: I1205 12:29:58.125687 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 05 12:29:58 crc kubenswrapper[4784]: I1205 12:29:58.162323 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 05 12:29:58 crc kubenswrapper[4784]: I1205 12:29:58.216253 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 05 12:29:58 crc kubenswrapper[4784]: I1205 12:29:58.286742 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 05 12:29:58 crc kubenswrapper[4784]: I1205 12:29:58.431930 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 05 12:29:58 crc kubenswrapper[4784]: I1205 12:29:58.449934 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 05 12:29:58 crc kubenswrapper[4784]: I1205 12:29:58.586229 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 05 12:29:58 crc kubenswrapper[4784]: I1205 12:29:58.640884 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 05 12:29:58 crc kubenswrapper[4784]: I1205 12:29:58.667316 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 05 12:29:58 crc kubenswrapper[4784]: I1205 12:29:58.760364 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 05 12:29:58 crc kubenswrapper[4784]: I1205 12:29:58.927156 4784 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 05 12:29:58 crc kubenswrapper[4784]: I1205 12:29:58.953761 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 05 12:29:59 crc kubenswrapper[4784]: I1205 12:29:59.006422 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 05 12:29:59 crc kubenswrapper[4784]: I1205 12:29:59.030346 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 05 12:29:59 crc kubenswrapper[4784]: I1205 12:29:59.032396 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 05 12:29:59 crc kubenswrapper[4784]: I1205 12:29:59.062068 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 05 12:29:59 crc kubenswrapper[4784]: I1205 12:29:59.098492 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 05 12:29:59 crc kubenswrapper[4784]: I1205 12:29:59.129717 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 05 12:29:59 crc kubenswrapper[4784]: I1205 12:29:59.144957 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 05 12:29:59 crc kubenswrapper[4784]: I1205 12:29:59.216526 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 05 12:29:59 crc kubenswrapper[4784]: I1205 12:29:59.256645 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 05 12:29:59 crc kubenswrapper[4784]: I1205 12:29:59.304544 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 05 12:29:59 crc kubenswrapper[4784]: I1205 12:29:59.331596 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 05 12:29:59 crc kubenswrapper[4784]: I1205 12:29:59.493584 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 05 12:29:59 crc kubenswrapper[4784]: I1205 12:29:59.523552 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 05 12:29:59 crc kubenswrapper[4784]: I1205 12:29:59.557161 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 05 12:29:59 crc kubenswrapper[4784]: I1205 12:29:59.601832 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 05 12:29:59 crc kubenswrapper[4784]: I1205 12:29:59.633151 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 05 12:29:59 crc kubenswrapper[4784]: I1205 12:29:59.659841 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 05 12:29:59 crc kubenswrapper[4784]: I1205 12:29:59.673761 4784 reflector.go:368] Caches populated 
for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 05 12:29:59 crc kubenswrapper[4784]: I1205 12:29:59.673778 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 05 12:29:59 crc kubenswrapper[4784]: I1205 12:29:59.676080 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 05 12:29:59 crc kubenswrapper[4784]: I1205 12:29:59.707683 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 05 12:29:59 crc kubenswrapper[4784]: I1205 12:29:59.828887 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 05 12:29:59 crc kubenswrapper[4784]: I1205 12:29:59.891928 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 05 12:29:59 crc kubenswrapper[4784]: I1205 12:29:59.919926 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 05 12:29:59 crc kubenswrapper[4784]: I1205 12:29:59.986478 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 05 12:30:00 crc kubenswrapper[4784]: I1205 12:30:00.149857 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 05 12:30:00 crc kubenswrapper[4784]: I1205 12:30:00.180733 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 05 12:30:00 crc kubenswrapper[4784]: I1205 12:30:00.203230 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 05 12:30:00 crc kubenswrapper[4784]: I1205 12:30:00.333100 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 05 12:30:00 crc kubenswrapper[4784]: I1205 12:30:00.510481 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 05 12:30:00 crc kubenswrapper[4784]: I1205 12:30:00.523412 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 05 12:30:00 crc kubenswrapper[4784]: I1205 12:30:00.602312 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 05 12:30:00 crc kubenswrapper[4784]: I1205 12:30:00.703376 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 05 12:30:00 crc kubenswrapper[4784]: I1205 12:30:00.729947 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 05 12:30:00 crc kubenswrapper[4784]: I1205 12:30:00.793786 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 05 12:30:01 crc kubenswrapper[4784]: I1205 12:30:01.037399 4784 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-image-registry"/"image-registry-tls" Dec 05 12:30:01 crc kubenswrapper[4784]: I1205 12:30:01.047813 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 05 12:30:01 crc kubenswrapper[4784]: I1205 12:30:01.070531 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 05 12:30:01 crc kubenswrapper[4784]: I1205 12:30:01.083823 4784 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 05 12:30:01 crc kubenswrapper[4784]: I1205 12:30:01.162156 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 05 12:30:01 crc kubenswrapper[4784]: I1205 12:30:01.246806 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 05 12:30:01 crc kubenswrapper[4784]: I1205 12:30:01.281436 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 05 12:30:01 crc kubenswrapper[4784]: I1205 12:30:01.351250 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 05 12:30:01 crc kubenswrapper[4784]: I1205 12:30:01.418644 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 05 12:30:01 crc kubenswrapper[4784]: I1205 12:30:01.558285 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 05 12:30:01 crc kubenswrapper[4784]: I1205 12:30:01.609447 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 05 12:30:01 crc kubenswrapper[4784]: I1205 12:30:01.641460 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 05 12:30:01 crc kubenswrapper[4784]: I1205 12:30:01.706458 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 05 12:30:01 crc kubenswrapper[4784]: I1205 12:30:01.773754 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 05 12:30:01 crc kubenswrapper[4784]: I1205 12:30:01.824302 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 05 12:30:01 crc kubenswrapper[4784]: I1205 12:30:01.854215 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 05 12:30:01 crc kubenswrapper[4784]: I1205 12:30:01.950158 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 05 12:30:01 crc kubenswrapper[4784]: I1205 12:30:01.951599 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 05 12:30:01 crc kubenswrapper[4784]: I1205 12:30:01.988132 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 05 12:30:02 crc kubenswrapper[4784]: I1205 12:30:02.004323 4784 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-oauth-apiserver"/"audit-1" Dec 05 12:30:02 crc kubenswrapper[4784]: I1205 12:30:02.040891 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 05 12:30:02 crc kubenswrapper[4784]: I1205 12:30:02.152236 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 05 12:30:02 crc kubenswrapper[4784]: I1205 12:30:02.278544 4784 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-marketplace/marketplace-operator-79b997595-kvfdl" Dec 05 12:30:02 crc kubenswrapper[4784]: I1205 12:30:02.278601 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-kvfdl" Dec 05 12:30:02 crc kubenswrapper[4784]: I1205 12:30:02.279266 4784 scope.go:117] "RemoveContainer" containerID="54e7aa2db90313fd72595a69a8a97b25ec8d6562efa67cf159b44c13b7b72643" Dec 05 12:30:02 crc kubenswrapper[4784]: I1205 12:30:02.458199 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 05 12:30:02 crc kubenswrapper[4784]: I1205 12:30:02.509824 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 05 12:30:02 crc kubenswrapper[4784]: I1205 12:30:02.514840 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 05 12:30:02 crc kubenswrapper[4784]: I1205 12:30:02.547881 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 05 12:30:02 crc kubenswrapper[4784]: I1205 12:30:02.587909 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 05 12:30:02 crc kubenswrapper[4784]: I1205 12:30:02.659770 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 05 12:30:02 crc kubenswrapper[4784]: I1205 12:30:02.808123 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 05 12:30:02 crc kubenswrapper[4784]: I1205 12:30:02.907453 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 05 12:30:02 crc kubenswrapper[4784]: I1205 12:30:02.907543 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-kvfdl_4825dcfb-cf17-4a0d-b4f2-4f46c87beccb/marketplace-operator/1.log" Dec 05 12:30:02 crc kubenswrapper[4784]: I1205 12:30:02.907635 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-kvfdl" event={"ID":"4825dcfb-cf17-4a0d-b4f2-4f46c87beccb","Type":"ContainerStarted","Data":"6bbaf7b7b42ca7648964c37fefcf8d9f9a3fff2d11cc62cc75fc6c1cc37d1ab5"} Dec 05 12:30:02 crc kubenswrapper[4784]: I1205 12:30:02.908048 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-kvfdl" Dec 05 12:30:02 crc kubenswrapper[4784]: I1205 12:30:02.939650 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 05 12:30:02 crc kubenswrapper[4784]: I1205 12:30:02.942142 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/marketplace-operator-79b997595-kvfdl" Dec 05 12:30:03 crc kubenswrapper[4784]: I1205 12:30:03.045239 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 05 12:30:03 crc kubenswrapper[4784]: I1205 12:30:03.163539 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 05 12:30:03 crc kubenswrapper[4784]: I1205 12:30:03.310635 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 05 12:30:03 crc kubenswrapper[4784]: I1205 12:30:03.396491 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 05 12:30:03 crc kubenswrapper[4784]: I1205 12:30:03.419286 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 05 12:30:03 crc kubenswrapper[4784]: I1205 12:30:03.421930 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 05 12:30:03 crc kubenswrapper[4784]: I1205 12:30:03.454749 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 05 12:30:03 crc kubenswrapper[4784]: I1205 12:30:03.466671 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 05 12:30:03 crc kubenswrapper[4784]: I1205 12:30:03.526689 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 05 12:30:03 crc kubenswrapper[4784]: I1205 12:30:03.604843 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 05 12:30:03 crc kubenswrapper[4784]: I1205 12:30:03.815056 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 05 12:30:03 crc kubenswrapper[4784]: I1205 12:30:03.888569 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 05 12:30:03 crc kubenswrapper[4784]: I1205 12:30:03.888740 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 05 12:30:03 crc kubenswrapper[4784]: I1205 12:30:03.975606 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 05 12:30:04 crc kubenswrapper[4784]: I1205 12:30:04.056994 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 05 12:30:04 crc kubenswrapper[4784]: I1205 12:30:04.065696 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 05 12:30:04 crc kubenswrapper[4784]: I1205 12:30:04.141961 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 05 12:30:04 crc kubenswrapper[4784]: I1205 12:30:04.224870 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 05 12:30:04 crc kubenswrapper[4784]: I1205 12:30:04.255609 4784 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-console-operator"/"kube-root-ca.crt" Dec 05 12:30:04 crc kubenswrapper[4784]: I1205 12:30:04.430300 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 05 12:30:04 crc kubenswrapper[4784]: I1205 12:30:04.433576 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 05 12:30:04 crc kubenswrapper[4784]: I1205 12:30:04.452999 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 05 12:30:04 crc kubenswrapper[4784]: I1205 12:30:04.487675 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 05 12:30:04 crc kubenswrapper[4784]: I1205 12:30:04.500087 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 05 12:30:04 crc kubenswrapper[4784]: I1205 12:30:04.580813 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 05 12:30:04 crc kubenswrapper[4784]: I1205 12:30:04.587585 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 05 12:30:04 crc kubenswrapper[4784]: I1205 12:30:04.600216 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 05 12:30:04 crc kubenswrapper[4784]: I1205 12:30:04.654064 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 05 12:30:04 crc kubenswrapper[4784]: I1205 12:30:04.739647 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 05 12:30:04 crc kubenswrapper[4784]: I1205 12:30:04.765814 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 05 12:30:04 crc kubenswrapper[4784]: I1205 12:30:04.849014 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 05 12:30:04 crc kubenswrapper[4784]: I1205 12:30:04.897029 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 05 12:30:04 crc kubenswrapper[4784]: I1205 12:30:04.923762 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 05 12:30:04 crc kubenswrapper[4784]: I1205 12:30:04.944851 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 05 12:30:04 crc kubenswrapper[4784]: I1205 12:30:04.947866 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 05 12:30:04 crc kubenswrapper[4784]: I1205 12:30:04.988127 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 05 12:30:05 crc kubenswrapper[4784]: I1205 12:30:05.006610 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 05 12:30:05 crc kubenswrapper[4784]: I1205 12:30:05.007889 4784 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 05 12:30:05 crc kubenswrapper[4784]: I1205 12:30:05.014416 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 05 12:30:05 crc kubenswrapper[4784]: I1205 12:30:05.084591 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 05 12:30:05 crc kubenswrapper[4784]: I1205 12:30:05.155012 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 05 12:30:05 crc kubenswrapper[4784]: I1205 12:30:05.161223 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 05 12:30:05 crc kubenswrapper[4784]: I1205 12:30:05.168543 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 05 12:30:05 crc kubenswrapper[4784]: I1205 12:30:05.205569 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 05 12:30:05 crc kubenswrapper[4784]: I1205 12:30:05.219126 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 05 12:30:05 crc kubenswrapper[4784]: I1205 12:30:05.271432 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 05 12:30:05 crc kubenswrapper[4784]: I1205 12:30:05.298926 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 05 12:30:05 crc kubenswrapper[4784]: I1205 12:30:05.349711 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 05 12:30:05 crc kubenswrapper[4784]: I1205 12:30:05.355675 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 05 12:30:05 crc kubenswrapper[4784]: I1205 12:30:05.370655 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 05 12:30:05 crc kubenswrapper[4784]: I1205 12:30:05.391842 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 05 12:30:05 crc kubenswrapper[4784]: I1205 12:30:05.409528 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 05 12:30:05 crc kubenswrapper[4784]: I1205 12:30:05.471205 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 05 12:30:05 crc kubenswrapper[4784]: I1205 12:30:05.549033 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 05 12:30:05 crc kubenswrapper[4784]: I1205 12:30:05.625222 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 05 12:30:05 crc kubenswrapper[4784]: I1205 12:30:05.659353 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 05 12:30:05 crc kubenswrapper[4784]: I1205 12:30:05.670299 4784 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 05 12:30:05 crc kubenswrapper[4784]: I1205 12:30:05.752775 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 05 12:30:05 crc kubenswrapper[4784]: I1205 12:30:05.752775 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 05 12:30:05 crc kubenswrapper[4784]: I1205 12:30:05.757628 4784 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 05 12:30:05 crc kubenswrapper[4784]: I1205 12:30:05.868578 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 05 12:30:05 crc kubenswrapper[4784]: I1205 12:30:05.958800 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 05 12:30:06 crc kubenswrapper[4784]: I1205 12:30:06.173876 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 05 12:30:06 crc kubenswrapper[4784]: I1205 12:30:06.190724 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 05 12:30:06 crc kubenswrapper[4784]: I1205 12:30:06.240125 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 05 12:30:06 crc kubenswrapper[4784]: I1205 12:30:06.312498 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 05 12:30:06 crc kubenswrapper[4784]: I1205 12:30:06.410224 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 05 12:30:06 crc kubenswrapper[4784]: I1205 12:30:06.479347 4784 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 05 12:30:06 crc kubenswrapper[4784]: I1205 12:30:06.551830 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 05 12:30:06 crc kubenswrapper[4784]: I1205 12:30:06.557837 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 05 12:30:06 crc kubenswrapper[4784]: I1205 12:30:06.564418 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 05 12:30:06 crc kubenswrapper[4784]: I1205 12:30:06.730687 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 05 12:30:06 crc kubenswrapper[4784]: I1205 12:30:06.758989 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 05 12:30:06 crc kubenswrapper[4784]: I1205 12:30:06.838877 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 05 12:30:07 crc kubenswrapper[4784]: I1205 12:30:06.928667 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 05 12:30:07 crc kubenswrapper[4784]: I1205 12:30:07.104037 4784 reflector.go:368] 
Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 05 12:30:07 crc kubenswrapper[4784]: I1205 12:30:07.132780 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 05 12:30:07 crc kubenswrapper[4784]: I1205 12:30:07.315924 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 05 12:30:07 crc kubenswrapper[4784]: I1205 12:30:07.316459 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 05 12:30:07 crc kubenswrapper[4784]: I1205 12:30:07.322426 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 05 12:30:07 crc kubenswrapper[4784]: I1205 12:30:07.373870 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 05 12:30:07 crc kubenswrapper[4784]: I1205 12:30:07.522394 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 05 12:30:07 crc kubenswrapper[4784]: I1205 12:30:07.527957 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 05 12:30:07 crc kubenswrapper[4784]: I1205 12:30:07.528712 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 05 12:30:07 crc kubenswrapper[4784]: I1205 12:30:07.588043 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 05 12:30:07 crc kubenswrapper[4784]: I1205 12:30:07.650787 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 05 12:30:07 crc kubenswrapper[4784]: I1205 12:30:07.720293 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 05 12:30:07 crc kubenswrapper[4784]: I1205 12:30:07.781797 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 05 12:30:07 crc kubenswrapper[4784]: I1205 12:30:07.969348 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 05 12:30:08 crc kubenswrapper[4784]: I1205 12:30:08.212338 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 05 12:30:08 crc kubenswrapper[4784]: I1205 12:30:08.239736 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 05 12:30:08 crc kubenswrapper[4784]: I1205 12:30:08.362477 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 05 12:30:08 crc kubenswrapper[4784]: I1205 12:30:08.629388 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 05 12:30:08 crc kubenswrapper[4784]: I1205 12:30:08.684044 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 05 12:30:08 crc 
kubenswrapper[4784]: I1205 12:30:08.745258 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 05 12:30:08 crc kubenswrapper[4784]: I1205 12:30:08.759800 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 05 12:30:08 crc kubenswrapper[4784]: I1205 12:30:08.775067 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 05 12:30:08 crc kubenswrapper[4784]: I1205 12:30:08.838030 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 05 12:30:08 crc kubenswrapper[4784]: I1205 12:30:08.990984 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 05 12:30:09 crc kubenswrapper[4784]: I1205 12:30:09.060661 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 05 12:30:09 crc kubenswrapper[4784]: I1205 12:30:09.080904 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 05 12:30:09 crc kubenswrapper[4784]: I1205 12:30:09.101523 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 05 12:30:09 crc kubenswrapper[4784]: I1205 12:30:09.181363 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 05 12:30:09 crc kubenswrapper[4784]: I1205 12:30:09.197340 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 05 12:30:09 crc kubenswrapper[4784]: I1205 12:30:09.252960 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 05 12:30:09 crc kubenswrapper[4784]: I1205 12:30:09.315927 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 05 12:30:09 crc kubenswrapper[4784]: I1205 12:30:09.458427 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 05 12:30:09 crc kubenswrapper[4784]: I1205 12:30:09.513512 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 05 12:30:09 crc kubenswrapper[4784]: I1205 12:30:09.515740 4784 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 05 12:30:09 crc kubenswrapper[4784]: I1205 12:30:09.519053 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-kvfdl" podStartSLOduration=48.519037212 podStartE2EDuration="48.519037212s" podCreationTimestamp="2025-12-05 12:29:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:29:50.852416883 +0000 UTC m=+270.272483698" watchObservedRunningTime="2025-12-05 12:30:09.519037212 +0000 UTC m=+288.939104027" Dec 05 12:30:09 crc kubenswrapper[4784]: I1205 12:30:09.520113 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openshift-marketplace/marketplace-operator-79b997595-txh4x","openshift-marketplace/community-operators-pd864","openshift-marketplace/certified-operators-sf2pd","openshift-marketplace/redhat-operators-vlvmb","openshift-marketplace/redhat-marketplace-hkkm9","openshift-authentication/oauth-openshift-558db77b4-kld95","openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 12:30:09 crc kubenswrapper[4784]: I1205 12:30:09.520177 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 12:30:09 crc kubenswrapper[4784]: I1205 12:30:09.520211 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-kvfdl"] Dec 05 12:30:09 crc kubenswrapper[4784]: I1205 12:30:09.520224 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-zjhhs"] Dec 05 12:30:09 crc kubenswrapper[4784]: I1205 12:30:09.542013 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=25.541994662 podStartE2EDuration="25.541994662s" podCreationTimestamp="2025-12-05 12:29:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:30:09.5400951 +0000 UTC m=+288.960161925" watchObservedRunningTime="2025-12-05 12:30:09.541994662 +0000 UTC m=+288.962061477" Dec 05 12:30:09 crc kubenswrapper[4784]: I1205 12:30:09.612356 4784 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 05 12:30:09 crc kubenswrapper[4784]: I1205 12:30:09.674296 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 05 12:30:09 crc kubenswrapper[4784]: I1205 12:30:09.676792 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 05 12:30:09 crc kubenswrapper[4784]: I1205 12:30:09.898593 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 05 12:30:10 crc kubenswrapper[4784]: I1205 12:30:10.082883 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415630-zf2zj"] Dec 05 12:30:10 crc kubenswrapper[4784]: E1205 12:30:10.083524 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da405e0d-550f-42a2-8b4e-a387eabb8e0a" containerName="oauth-openshift" Dec 05 12:30:10 crc kubenswrapper[4784]: I1205 12:30:10.083541 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="da405e0d-550f-42a2-8b4e-a387eabb8e0a" containerName="oauth-openshift" Dec 05 12:30:10 crc kubenswrapper[4784]: E1205 12:30:10.083577 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6a64333-7a87-462a-996f-b3ce85e43c8f" containerName="extract-content" Dec 05 12:30:10 crc kubenswrapper[4784]: I1205 12:30:10.083586 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6a64333-7a87-462a-996f-b3ce85e43c8f" containerName="extract-content" Dec 05 12:30:10 crc kubenswrapper[4784]: E1205 12:30:10.083599 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d81f1d0-3e85-443a-a738-2e0d9302d327" containerName="extract-content" Dec 05 12:30:10 crc kubenswrapper[4784]: I1205 12:30:10.083608 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d81f1d0-3e85-443a-a738-2e0d9302d327" containerName="extract-content" Dec 05 
12:30:10 crc kubenswrapper[4784]: E1205 12:30:10.083618 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be2166c8-c56d-46bd-ac93-e6eeb11ecba6" containerName="registry-server" Dec 05 12:30:10 crc kubenswrapper[4784]: I1205 12:30:10.083623 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="be2166c8-c56d-46bd-ac93-e6eeb11ecba6" containerName="registry-server" Dec 05 12:30:10 crc kubenswrapper[4784]: E1205 12:30:10.083652 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d81f1d0-3e85-443a-a738-2e0d9302d327" containerName="extract-utilities" Dec 05 12:30:10 crc kubenswrapper[4784]: I1205 12:30:10.083662 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d81f1d0-3e85-443a-a738-2e0d9302d327" containerName="extract-utilities" Dec 05 12:30:10 crc kubenswrapper[4784]: E1205 12:30:10.083671 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="626019ff-24ba-4b81-b6ad-ba7c7085fa55" containerName="extract-utilities" Dec 05 12:30:10 crc kubenswrapper[4784]: I1205 12:30:10.083678 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="626019ff-24ba-4b81-b6ad-ba7c7085fa55" containerName="extract-utilities" Dec 05 12:30:10 crc kubenswrapper[4784]: E1205 12:30:10.083687 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be2166c8-c56d-46bd-ac93-e6eeb11ecba6" containerName="extract-utilities" Dec 05 12:30:10 crc kubenswrapper[4784]: I1205 12:30:10.083695 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="be2166c8-c56d-46bd-ac93-e6eeb11ecba6" containerName="extract-utilities" Dec 05 12:30:10 crc kubenswrapper[4784]: E1205 12:30:10.083704 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="626019ff-24ba-4b81-b6ad-ba7c7085fa55" containerName="extract-content" Dec 05 12:30:10 crc kubenswrapper[4784]: I1205 12:30:10.083710 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="626019ff-24ba-4b81-b6ad-ba7c7085fa55" containerName="extract-content" Dec 05 12:30:10 crc kubenswrapper[4784]: E1205 12:30:10.083720 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="390fc8b7-86b2-4ecc-a41f-1fffd11b1a22" containerName="installer" Dec 05 12:30:10 crc kubenswrapper[4784]: I1205 12:30:10.083727 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="390fc8b7-86b2-4ecc-a41f-1fffd11b1a22" containerName="installer" Dec 05 12:30:10 crc kubenswrapper[4784]: E1205 12:30:10.083735 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="626019ff-24ba-4b81-b6ad-ba7c7085fa55" containerName="registry-server" Dec 05 12:30:10 crc kubenswrapper[4784]: I1205 12:30:10.083741 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="626019ff-24ba-4b81-b6ad-ba7c7085fa55" containerName="registry-server" Dec 05 12:30:10 crc kubenswrapper[4784]: E1205 12:30:10.083749 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e350bf27-d60f-4f5f-9bc0-460e997fed0c" containerName="marketplace-operator" Dec 05 12:30:10 crc kubenswrapper[4784]: I1205 12:30:10.083756 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="e350bf27-d60f-4f5f-9bc0-460e997fed0c" containerName="marketplace-operator" Dec 05 12:30:10 crc kubenswrapper[4784]: E1205 12:30:10.083766 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6a64333-7a87-462a-996f-b3ce85e43c8f" containerName="extract-utilities" Dec 05 12:30:10 crc kubenswrapper[4784]: I1205 12:30:10.083773 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6a64333-7a87-462a-996f-b3ce85e43c8f" 
containerName="extract-utilities" Dec 05 12:30:10 crc kubenswrapper[4784]: E1205 12:30:10.083785 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d81f1d0-3e85-443a-a738-2e0d9302d327" containerName="registry-server" Dec 05 12:30:10 crc kubenswrapper[4784]: I1205 12:30:10.083792 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d81f1d0-3e85-443a-a738-2e0d9302d327" containerName="registry-server" Dec 05 12:30:10 crc kubenswrapper[4784]: E1205 12:30:10.083802 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6a64333-7a87-462a-996f-b3ce85e43c8f" containerName="registry-server" Dec 05 12:30:10 crc kubenswrapper[4784]: I1205 12:30:10.083808 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6a64333-7a87-462a-996f-b3ce85e43c8f" containerName="registry-server" Dec 05 12:30:10 crc kubenswrapper[4784]: E1205 12:30:10.083815 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be2166c8-c56d-46bd-ac93-e6eeb11ecba6" containerName="extract-content" Dec 05 12:30:10 crc kubenswrapper[4784]: I1205 12:30:10.083820 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="be2166c8-c56d-46bd-ac93-e6eeb11ecba6" containerName="extract-content" Dec 05 12:30:10 crc kubenswrapper[4784]: I1205 12:30:10.083947 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="be2166c8-c56d-46bd-ac93-e6eeb11ecba6" containerName="registry-server" Dec 05 12:30:10 crc kubenswrapper[4784]: I1205 12:30:10.083983 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6a64333-7a87-462a-996f-b3ce85e43c8f" containerName="registry-server" Dec 05 12:30:10 crc kubenswrapper[4784]: I1205 12:30:10.083991 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="e350bf27-d60f-4f5f-9bc0-460e997fed0c" containerName="marketplace-operator" Dec 05 12:30:10 crc kubenswrapper[4784]: I1205 12:30:10.084001 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="626019ff-24ba-4b81-b6ad-ba7c7085fa55" containerName="registry-server" Dec 05 12:30:10 crc kubenswrapper[4784]: I1205 12:30:10.084010 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="390fc8b7-86b2-4ecc-a41f-1fffd11b1a22" containerName="installer" Dec 05 12:30:10 crc kubenswrapper[4784]: I1205 12:30:10.084023 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="da405e0d-550f-42a2-8b4e-a387eabb8e0a" containerName="oauth-openshift" Dec 05 12:30:10 crc kubenswrapper[4784]: I1205 12:30:10.084034 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d81f1d0-3e85-443a-a738-2e0d9302d327" containerName="registry-server" Dec 05 12:30:10 crc kubenswrapper[4784]: I1205 12:30:10.084775 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-zf2zj" Dec 05 12:30:10 crc kubenswrapper[4784]: I1205 12:30:10.089939 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 12:30:10 crc kubenswrapper[4784]: I1205 12:30:10.090462 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415630-zf2zj"] Dec 05 12:30:10 crc kubenswrapper[4784]: I1205 12:30:10.090975 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 12:30:10 crc kubenswrapper[4784]: I1205 12:30:10.148633 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 05 12:30:10 crc kubenswrapper[4784]: I1205 12:30:10.158072 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 05 12:30:10 crc kubenswrapper[4784]: I1205 12:30:10.203718 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 05 12:30:10 crc kubenswrapper[4784]: I1205 12:30:10.207711 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/32012ed7-29ed-48ce-a59d-4d39e62e1672-config-volume\") pod \"collect-profiles-29415630-zf2zj\" (UID: \"32012ed7-29ed-48ce-a59d-4d39e62e1672\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-zf2zj" Dec 05 12:30:10 crc kubenswrapper[4784]: I1205 12:30:10.207745 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tfw2n\" (UniqueName: \"kubernetes.io/projected/32012ed7-29ed-48ce-a59d-4d39e62e1672-kube-api-access-tfw2n\") pod \"collect-profiles-29415630-zf2zj\" (UID: \"32012ed7-29ed-48ce-a59d-4d39e62e1672\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-zf2zj" Dec 05 12:30:10 crc kubenswrapper[4784]: I1205 12:30:10.207939 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/32012ed7-29ed-48ce-a59d-4d39e62e1672-secret-volume\") pod \"collect-profiles-29415630-zf2zj\" (UID: \"32012ed7-29ed-48ce-a59d-4d39e62e1672\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-zf2zj" Dec 05 12:30:10 crc kubenswrapper[4784]: I1205 12:30:10.309643 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/32012ed7-29ed-48ce-a59d-4d39e62e1672-config-volume\") pod \"collect-profiles-29415630-zf2zj\" (UID: \"32012ed7-29ed-48ce-a59d-4d39e62e1672\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-zf2zj" Dec 05 12:30:10 crc kubenswrapper[4784]: I1205 12:30:10.309698 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tfw2n\" (UniqueName: \"kubernetes.io/projected/32012ed7-29ed-48ce-a59d-4d39e62e1672-kube-api-access-tfw2n\") pod \"collect-profiles-29415630-zf2zj\" (UID: \"32012ed7-29ed-48ce-a59d-4d39e62e1672\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-zf2zj" Dec 05 12:30:10 crc kubenswrapper[4784]: I1205 12:30:10.309740 4784 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/32012ed7-29ed-48ce-a59d-4d39e62e1672-secret-volume\") pod \"collect-profiles-29415630-zf2zj\" (UID: \"32012ed7-29ed-48ce-a59d-4d39e62e1672\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-zf2zj" Dec 05 12:30:10 crc kubenswrapper[4784]: I1205 12:30:10.310629 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/32012ed7-29ed-48ce-a59d-4d39e62e1672-config-volume\") pod \"collect-profiles-29415630-zf2zj\" (UID: \"32012ed7-29ed-48ce-a59d-4d39e62e1672\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-zf2zj" Dec 05 12:30:10 crc kubenswrapper[4784]: I1205 12:30:10.316016 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/32012ed7-29ed-48ce-a59d-4d39e62e1672-secret-volume\") pod \"collect-profiles-29415630-zf2zj\" (UID: \"32012ed7-29ed-48ce-a59d-4d39e62e1672\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-zf2zj" Dec 05 12:30:10 crc kubenswrapper[4784]: I1205 12:30:10.326229 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tfw2n\" (UniqueName: \"kubernetes.io/projected/32012ed7-29ed-48ce-a59d-4d39e62e1672-kube-api-access-tfw2n\") pod \"collect-profiles-29415630-zf2zj\" (UID: \"32012ed7-29ed-48ce-a59d-4d39e62e1672\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-zf2zj" Dec 05 12:30:10 crc kubenswrapper[4784]: I1205 12:30:10.416131 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-zf2zj" Dec 05 12:30:10 crc kubenswrapper[4784]: I1205 12:30:10.707350 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 05 12:30:10 crc kubenswrapper[4784]: I1205 12:30:10.823610 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415630-zf2zj"] Dec 05 12:30:10 crc kubenswrapper[4784]: I1205 12:30:10.956577 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-zf2zj" event={"ID":"32012ed7-29ed-48ce-a59d-4d39e62e1672","Type":"ContainerStarted","Data":"97773e2ecaa95cb0e9b64a6fcff9eccdbe5c3d01bb8b58e5c682c9e2101ad848"} Dec 05 12:30:11 crc kubenswrapper[4784]: I1205 12:30:11.005639 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="626019ff-24ba-4b81-b6ad-ba7c7085fa55" path="/var/lib/kubelet/pods/626019ff-24ba-4b81-b6ad-ba7c7085fa55/volumes" Dec 05 12:30:11 crc kubenswrapper[4784]: I1205 12:30:11.006705 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6d81f1d0-3e85-443a-a738-2e0d9302d327" path="/var/lib/kubelet/pods/6d81f1d0-3e85-443a-a738-2e0d9302d327/volumes" Dec 05 12:30:11 crc kubenswrapper[4784]: I1205 12:30:11.007323 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="be2166c8-c56d-46bd-ac93-e6eeb11ecba6" path="/var/lib/kubelet/pods/be2166c8-c56d-46bd-ac93-e6eeb11ecba6/volumes" Dec 05 12:30:11 crc kubenswrapper[4784]: I1205 12:30:11.008572 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="da405e0d-550f-42a2-8b4e-a387eabb8e0a" path="/var/lib/kubelet/pods/da405e0d-550f-42a2-8b4e-a387eabb8e0a/volumes" Dec 05 12:30:11 crc 
kubenswrapper[4784]: I1205 12:30:11.009264 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e350bf27-d60f-4f5f-9bc0-460e997fed0c" path="/var/lib/kubelet/pods/e350bf27-d60f-4f5f-9bc0-460e997fed0c/volumes" Dec 05 12:30:11 crc kubenswrapper[4784]: I1205 12:30:11.009777 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f6a64333-7a87-462a-996f-b3ce85e43c8f" path="/var/lib/kubelet/pods/f6a64333-7a87-462a-996f-b3ce85e43c8f/volumes" Dec 05 12:30:11 crc kubenswrapper[4784]: I1205 12:30:11.074810 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 05 12:30:11 crc kubenswrapper[4784]: I1205 12:30:11.963248 4784 generic.go:334] "Generic (PLEG): container finished" podID="32012ed7-29ed-48ce-a59d-4d39e62e1672" containerID="4d83f32dbe9e80cd2ae4ea65346b4894aa24b284d49351b941ae000059d79f57" exitCode=0 Dec 05 12:30:11 crc kubenswrapper[4784]: I1205 12:30:11.963302 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-zf2zj" event={"ID":"32012ed7-29ed-48ce-a59d-4d39e62e1672","Type":"ContainerDied","Data":"4d83f32dbe9e80cd2ae4ea65346b4894aa24b284d49351b941ae000059d79f57"} Dec 05 12:30:12 crc kubenswrapper[4784]: I1205 12:30:12.026473 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:30:12 crc kubenswrapper[4784]: I1205 12:30:12.032900 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 12:30:13 crc kubenswrapper[4784]: I1205 12:30:13.231187 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-zf2zj" Dec 05 12:30:13 crc kubenswrapper[4784]: I1205 12:30:13.348528 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/32012ed7-29ed-48ce-a59d-4d39e62e1672-config-volume\") pod \"32012ed7-29ed-48ce-a59d-4d39e62e1672\" (UID: \"32012ed7-29ed-48ce-a59d-4d39e62e1672\") " Dec 05 12:30:13 crc kubenswrapper[4784]: I1205 12:30:13.348625 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tfw2n\" (UniqueName: \"kubernetes.io/projected/32012ed7-29ed-48ce-a59d-4d39e62e1672-kube-api-access-tfw2n\") pod \"32012ed7-29ed-48ce-a59d-4d39e62e1672\" (UID: \"32012ed7-29ed-48ce-a59d-4d39e62e1672\") " Dec 05 12:30:13 crc kubenswrapper[4784]: I1205 12:30:13.348649 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/32012ed7-29ed-48ce-a59d-4d39e62e1672-secret-volume\") pod \"32012ed7-29ed-48ce-a59d-4d39e62e1672\" (UID: \"32012ed7-29ed-48ce-a59d-4d39e62e1672\") " Dec 05 12:30:13 crc kubenswrapper[4784]: I1205 12:30:13.349366 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/32012ed7-29ed-48ce-a59d-4d39e62e1672-config-volume" (OuterVolumeSpecName: "config-volume") pod "32012ed7-29ed-48ce-a59d-4d39e62e1672" (UID: "32012ed7-29ed-48ce-a59d-4d39e62e1672"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:30:13 crc kubenswrapper[4784]: I1205 12:30:13.354768 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32012ed7-29ed-48ce-a59d-4d39e62e1672-kube-api-access-tfw2n" (OuterVolumeSpecName: "kube-api-access-tfw2n") pod "32012ed7-29ed-48ce-a59d-4d39e62e1672" (UID: "32012ed7-29ed-48ce-a59d-4d39e62e1672"). InnerVolumeSpecName "kube-api-access-tfw2n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:30:13 crc kubenswrapper[4784]: I1205 12:30:13.355126 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32012ed7-29ed-48ce-a59d-4d39e62e1672-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "32012ed7-29ed-48ce-a59d-4d39e62e1672" (UID: "32012ed7-29ed-48ce-a59d-4d39e62e1672"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:30:13 crc kubenswrapper[4784]: I1205 12:30:13.449996 4784 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/32012ed7-29ed-48ce-a59d-4d39e62e1672-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:13 crc kubenswrapper[4784]: I1205 12:30:13.450051 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tfw2n\" (UniqueName: \"kubernetes.io/projected/32012ed7-29ed-48ce-a59d-4d39e62e1672-kube-api-access-tfw2n\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:13 crc kubenswrapper[4784]: I1205 12:30:13.450064 4784 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/32012ed7-29ed-48ce-a59d-4d39e62e1672-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:13 crc kubenswrapper[4784]: I1205 12:30:13.975043 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-zf2zj" event={"ID":"32012ed7-29ed-48ce-a59d-4d39e62e1672","Type":"ContainerDied","Data":"97773e2ecaa95cb0e9b64a6fcff9eccdbe5c3d01bb8b58e5c682c9e2101ad848"} Dec 05 12:30:13 crc kubenswrapper[4784]: I1205 12:30:13.975307 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="97773e2ecaa95cb0e9b64a6fcff9eccdbe5c3d01bb8b58e5c682c9e2101ad848" Dec 05 12:30:13 crc kubenswrapper[4784]: I1205 12:30:13.975146 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415630-zf2zj" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.745658 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-786b6d57dd-msbch"] Dec 05 12:30:14 crc kubenswrapper[4784]: E1205 12:30:14.745924 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32012ed7-29ed-48ce-a59d-4d39e62e1672" containerName="collect-profiles" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.745938 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="32012ed7-29ed-48ce-a59d-4d39e62e1672" containerName="collect-profiles" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.746077 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="32012ed7-29ed-48ce-a59d-4d39e62e1672" containerName="collect-profiles" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.747161 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.751011 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.752106 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.752106 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.752476 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.752819 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.753540 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.753700 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.754476 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.755330 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.755441 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.755049 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.755585 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.758144 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-786b6d57dd-msbch"] Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.764172 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.765748 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.777529 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.866202 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/51cd5b57-4997-4f19-a68f-703546a70ea0-audit-policies\") pod \"oauth-openshift-786b6d57dd-msbch\" (UID: \"51cd5b57-4997-4f19-a68f-703546a70ea0\") " 
pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.866263 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qzmrp\" (UniqueName: \"kubernetes.io/projected/51cd5b57-4997-4f19-a68f-703546a70ea0-kube-api-access-qzmrp\") pod \"oauth-openshift-786b6d57dd-msbch\" (UID: \"51cd5b57-4997-4f19-a68f-703546a70ea0\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.866304 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/51cd5b57-4997-4f19-a68f-703546a70ea0-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-786b6d57dd-msbch\" (UID: \"51cd5b57-4997-4f19-a68f-703546a70ea0\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.866332 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/51cd5b57-4997-4f19-a68f-703546a70ea0-v4-0-config-user-template-login\") pod \"oauth-openshift-786b6d57dd-msbch\" (UID: \"51cd5b57-4997-4f19-a68f-703546a70ea0\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.866357 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/51cd5b57-4997-4f19-a68f-703546a70ea0-v4-0-config-system-session\") pod \"oauth-openshift-786b6d57dd-msbch\" (UID: \"51cd5b57-4997-4f19-a68f-703546a70ea0\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.866381 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/51cd5b57-4997-4f19-a68f-703546a70ea0-v4-0-config-system-serving-cert\") pod \"oauth-openshift-786b6d57dd-msbch\" (UID: \"51cd5b57-4997-4f19-a68f-703546a70ea0\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.866415 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/51cd5b57-4997-4f19-a68f-703546a70ea0-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-786b6d57dd-msbch\" (UID: \"51cd5b57-4997-4f19-a68f-703546a70ea0\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.866436 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/51cd5b57-4997-4f19-a68f-703546a70ea0-v4-0-config-system-cliconfig\") pod \"oauth-openshift-786b6d57dd-msbch\" (UID: \"51cd5b57-4997-4f19-a68f-703546a70ea0\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.866457 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: 
\"kubernetes.io/secret/51cd5b57-4997-4f19-a68f-703546a70ea0-v4-0-config-system-router-certs\") pod \"oauth-openshift-786b6d57dd-msbch\" (UID: \"51cd5b57-4997-4f19-a68f-703546a70ea0\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.866479 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/51cd5b57-4997-4f19-a68f-703546a70ea0-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-786b6d57dd-msbch\" (UID: \"51cd5b57-4997-4f19-a68f-703546a70ea0\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.866506 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/51cd5b57-4997-4f19-a68f-703546a70ea0-v4-0-config-user-template-error\") pod \"oauth-openshift-786b6d57dd-msbch\" (UID: \"51cd5b57-4997-4f19-a68f-703546a70ea0\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.866530 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/51cd5b57-4997-4f19-a68f-703546a70ea0-v4-0-config-system-service-ca\") pod \"oauth-openshift-786b6d57dd-msbch\" (UID: \"51cd5b57-4997-4f19-a68f-703546a70ea0\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.866554 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/51cd5b57-4997-4f19-a68f-703546a70ea0-audit-dir\") pod \"oauth-openshift-786b6d57dd-msbch\" (UID: \"51cd5b57-4997-4f19-a68f-703546a70ea0\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.866800 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/51cd5b57-4997-4f19-a68f-703546a70ea0-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-786b6d57dd-msbch\" (UID: \"51cd5b57-4997-4f19-a68f-703546a70ea0\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.968057 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/51cd5b57-4997-4f19-a68f-703546a70ea0-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-786b6d57dd-msbch\" (UID: \"51cd5b57-4997-4f19-a68f-703546a70ea0\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.968112 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/51cd5b57-4997-4f19-a68f-703546a70ea0-audit-policies\") pod \"oauth-openshift-786b6d57dd-msbch\" (UID: \"51cd5b57-4997-4f19-a68f-703546a70ea0\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.968137 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-qzmrp\" (UniqueName: \"kubernetes.io/projected/51cd5b57-4997-4f19-a68f-703546a70ea0-kube-api-access-qzmrp\") pod \"oauth-openshift-786b6d57dd-msbch\" (UID: \"51cd5b57-4997-4f19-a68f-703546a70ea0\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.968164 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/51cd5b57-4997-4f19-a68f-703546a70ea0-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-786b6d57dd-msbch\" (UID: \"51cd5b57-4997-4f19-a68f-703546a70ea0\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.968211 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/51cd5b57-4997-4f19-a68f-703546a70ea0-v4-0-config-user-template-login\") pod \"oauth-openshift-786b6d57dd-msbch\" (UID: \"51cd5b57-4997-4f19-a68f-703546a70ea0\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.968230 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/51cd5b57-4997-4f19-a68f-703546a70ea0-v4-0-config-system-session\") pod \"oauth-openshift-786b6d57dd-msbch\" (UID: \"51cd5b57-4997-4f19-a68f-703546a70ea0\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.968255 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/51cd5b57-4997-4f19-a68f-703546a70ea0-v4-0-config-system-serving-cert\") pod \"oauth-openshift-786b6d57dd-msbch\" (UID: \"51cd5b57-4997-4f19-a68f-703546a70ea0\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.968280 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/51cd5b57-4997-4f19-a68f-703546a70ea0-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-786b6d57dd-msbch\" (UID: \"51cd5b57-4997-4f19-a68f-703546a70ea0\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.968296 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/51cd5b57-4997-4f19-a68f-703546a70ea0-v4-0-config-system-cliconfig\") pod \"oauth-openshift-786b6d57dd-msbch\" (UID: \"51cd5b57-4997-4f19-a68f-703546a70ea0\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.968326 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/51cd5b57-4997-4f19-a68f-703546a70ea0-v4-0-config-system-router-certs\") pod \"oauth-openshift-786b6d57dd-msbch\" (UID: \"51cd5b57-4997-4f19-a68f-703546a70ea0\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.968355 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/51cd5b57-4997-4f19-a68f-703546a70ea0-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-786b6d57dd-msbch\" (UID: \"51cd5b57-4997-4f19-a68f-703546a70ea0\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.968380 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/51cd5b57-4997-4f19-a68f-703546a70ea0-v4-0-config-user-template-error\") pod \"oauth-openshift-786b6d57dd-msbch\" (UID: \"51cd5b57-4997-4f19-a68f-703546a70ea0\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.968398 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/51cd5b57-4997-4f19-a68f-703546a70ea0-v4-0-config-system-service-ca\") pod \"oauth-openshift-786b6d57dd-msbch\" (UID: \"51cd5b57-4997-4f19-a68f-703546a70ea0\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.968413 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/51cd5b57-4997-4f19-a68f-703546a70ea0-audit-dir\") pod \"oauth-openshift-786b6d57dd-msbch\" (UID: \"51cd5b57-4997-4f19-a68f-703546a70ea0\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.970342 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/51cd5b57-4997-4f19-a68f-703546a70ea0-audit-dir\") pod \"oauth-openshift-786b6d57dd-msbch\" (UID: \"51cd5b57-4997-4f19-a68f-703546a70ea0\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.970963 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/51cd5b57-4997-4f19-a68f-703546a70ea0-v4-0-config-system-service-ca\") pod \"oauth-openshift-786b6d57dd-msbch\" (UID: \"51cd5b57-4997-4f19-a68f-703546a70ea0\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.970966 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/51cd5b57-4997-4f19-a68f-703546a70ea0-v4-0-config-system-cliconfig\") pod \"oauth-openshift-786b6d57dd-msbch\" (UID: \"51cd5b57-4997-4f19-a68f-703546a70ea0\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.971441 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/51cd5b57-4997-4f19-a68f-703546a70ea0-audit-policies\") pod \"oauth-openshift-786b6d57dd-msbch\" (UID: \"51cd5b57-4997-4f19-a68f-703546a70ea0\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.972540 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/51cd5b57-4997-4f19-a68f-703546a70ea0-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-786b6d57dd-msbch\" (UID: \"51cd5b57-4997-4f19-a68f-703546a70ea0\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.974404 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/51cd5b57-4997-4f19-a68f-703546a70ea0-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-786b6d57dd-msbch\" (UID: \"51cd5b57-4997-4f19-a68f-703546a70ea0\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.974504 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/51cd5b57-4997-4f19-a68f-703546a70ea0-v4-0-config-system-session\") pod \"oauth-openshift-786b6d57dd-msbch\" (UID: \"51cd5b57-4997-4f19-a68f-703546a70ea0\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.974666 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/51cd5b57-4997-4f19-a68f-703546a70ea0-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-786b6d57dd-msbch\" (UID: \"51cd5b57-4997-4f19-a68f-703546a70ea0\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.974979 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/51cd5b57-4997-4f19-a68f-703546a70ea0-v4-0-config-user-template-error\") pod \"oauth-openshift-786b6d57dd-msbch\" (UID: \"51cd5b57-4997-4f19-a68f-703546a70ea0\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.975467 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/51cd5b57-4997-4f19-a68f-703546a70ea0-v4-0-config-system-serving-cert\") pod \"oauth-openshift-786b6d57dd-msbch\" (UID: \"51cd5b57-4997-4f19-a68f-703546a70ea0\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.975940 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/51cd5b57-4997-4f19-a68f-703546a70ea0-v4-0-config-system-router-certs\") pod \"oauth-openshift-786b6d57dd-msbch\" (UID: \"51cd5b57-4997-4f19-a68f-703546a70ea0\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.976114 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/51cd5b57-4997-4f19-a68f-703546a70ea0-v4-0-config-user-template-login\") pod \"oauth-openshift-786b6d57dd-msbch\" (UID: \"51cd5b57-4997-4f19-a68f-703546a70ea0\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.976701 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: 
\"kubernetes.io/secret/51cd5b57-4997-4f19-a68f-703546a70ea0-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-786b6d57dd-msbch\" (UID: \"51cd5b57-4997-4f19-a68f-703546a70ea0\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" Dec 05 12:30:14 crc kubenswrapper[4784]: I1205 12:30:14.988010 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qzmrp\" (UniqueName: \"kubernetes.io/projected/51cd5b57-4997-4f19-a68f-703546a70ea0-kube-api-access-qzmrp\") pod \"oauth-openshift-786b6d57dd-msbch\" (UID: \"51cd5b57-4997-4f19-a68f-703546a70ea0\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" Dec 05 12:30:15 crc kubenswrapper[4784]: I1205 12:30:15.088222 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" Dec 05 12:30:15 crc kubenswrapper[4784]: I1205 12:30:15.498658 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-786b6d57dd-msbch"] Dec 05 12:30:16 crc kubenswrapper[4784]: I1205 12:30:16.003893 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" event={"ID":"51cd5b57-4997-4f19-a68f-703546a70ea0","Type":"ContainerStarted","Data":"7e55eed69325f7dd7111233fd790c676a21b7ce6ca8b9a8caa20a6deba7cc9ed"} Dec 05 12:30:16 crc kubenswrapper[4784]: I1205 12:30:16.004291 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" event={"ID":"51cd5b57-4997-4f19-a68f-703546a70ea0","Type":"ContainerStarted","Data":"f9b329bd30d497a85a2003495258792fd439b123c7bd06f8f8f1787b57c63d00"} Dec 05 12:30:16 crc kubenswrapper[4784]: I1205 12:30:16.004315 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" Dec 05 12:30:16 crc kubenswrapper[4784]: I1205 12:30:16.244483 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" Dec 05 12:30:16 crc kubenswrapper[4784]: I1205 12:30:16.264675 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-786b6d57dd-msbch" podStartSLOduration=58.264657614 podStartE2EDuration="58.264657614s" podCreationTimestamp="2025-12-05 12:29:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:30:16.030758308 +0000 UTC m=+295.450825133" watchObservedRunningTime="2025-12-05 12:30:16.264657614 +0000 UTC m=+295.684724429" Dec 05 12:30:18 crc kubenswrapper[4784]: I1205 12:30:18.669247 4784 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 05 12:30:18 crc kubenswrapper[4784]: I1205 12:30:18.669475 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://1aab79e45ce0d338db945a1de844d1aec110d3bcdf6dfc35455ca82398492c06" gracePeriod=5 Dec 05 12:30:24 crc kubenswrapper[4784]: I1205 12:30:24.049808 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 05 12:30:24 crc 
kubenswrapper[4784]: I1205 12:30:24.050444 4784 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="1aab79e45ce0d338db945a1de844d1aec110d3bcdf6dfc35455ca82398492c06" exitCode=137
Dec 05 12:30:24 crc kubenswrapper[4784]: I1205 12:30:24.258899 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log"
Dec 05 12:30:24 crc kubenswrapper[4784]: I1205 12:30:24.259079 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 12:30:24 crc kubenswrapper[4784]: I1205 12:30:24.415874 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 05 12:30:24 crc kubenswrapper[4784]: I1205 12:30:24.416041 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 12:30:24 crc kubenswrapper[4784]: I1205 12:30:24.416210 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 05 12:30:24 crc kubenswrapper[4784]: I1205 12:30:24.416327 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 05 12:30:24 crc kubenswrapper[4784]: I1205 12:30:24.416443 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 05 12:30:24 crc kubenswrapper[4784]: I1205 12:30:24.416469 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 05 12:30:24 crc kubenswrapper[4784]: I1205 12:30:24.416493 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 12:30:24 crc kubenswrapper[4784]: I1205 12:30:24.416634 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 12:30:24 crc kubenswrapper[4784]: I1205 12:30:24.416659 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 12:30:24 crc kubenswrapper[4784]: I1205 12:30:24.416920 4784 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\""
Dec 05 12:30:24 crc kubenswrapper[4784]: I1205 12:30:24.416992 4784 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\""
Dec 05 12:30:24 crc kubenswrapper[4784]: I1205 12:30:24.417049 4784 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\""
Dec 05 12:30:24 crc kubenswrapper[4784]: I1205 12:30:24.417114 4784 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\""
Dec 05 12:30:24 crc kubenswrapper[4784]: I1205 12:30:24.423961 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 12:30:24 crc kubenswrapper[4784]: I1205 12:30:24.517869 4784 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\""
Dec 05 12:30:25 crc kubenswrapper[4784]: I1205 12:30:25.004811 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes"
Dec 05 12:30:25 crc kubenswrapper[4784]: I1205 12:30:25.056746 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log"
Dec 05 12:30:25 crc kubenswrapper[4784]: I1205 12:30:25.056858 4784 scope.go:117] "RemoveContainer" containerID="1aab79e45ce0d338db945a1de844d1aec110d3bcdf6dfc35455ca82398492c06"
Dec 05 12:30:25 crc kubenswrapper[4784]: I1205 12:30:25.056934 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 12:30:26 crc kubenswrapper[4784]: I1205 12:30:26.523835 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-2drbk"]
Dec 05 12:30:26 crc kubenswrapper[4784]: I1205 12:30:26.524285 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-2drbk" podUID="96b39f3e-b508-4f02-ae7a-d391eeca4988" containerName="controller-manager" containerID="cri-o://1ca2240bab3459a80b01224ca88bc88e6f780a9712287579feb51f56301de141" gracePeriod=30
Dec 05 12:30:26 crc kubenswrapper[4784]: I1205 12:30:26.618645 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-b2lpc"]
Dec 05 12:30:26 crc kubenswrapper[4784]: I1205 12:30:26.618847 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b2lpc" podUID="1eb80025-ed6a-4509-99ca-57f7a4c9eefb" containerName="route-controller-manager" containerID="cri-o://1d7c0360a54b382bf2157239816176812981170752723c962d30d8d657d982ef" gracePeriod=30
Dec 05 12:30:26 crc kubenswrapper[4784]: I1205 12:30:26.863867 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-2drbk"
Dec 05 12:30:26 crc kubenswrapper[4784]: I1205 12:30:26.947917 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/96b39f3e-b508-4f02-ae7a-d391eeca4988-config\") pod \"96b39f3e-b508-4f02-ae7a-d391eeca4988\" (UID: \"96b39f3e-b508-4f02-ae7a-d391eeca4988\") "
Dec 05 12:30:26 crc kubenswrapper[4784]: I1205 12:30:26.947993 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/96b39f3e-b508-4f02-ae7a-d391eeca4988-client-ca\") pod \"96b39f3e-b508-4f02-ae7a-d391eeca4988\" (UID: \"96b39f3e-b508-4f02-ae7a-d391eeca4988\") "
Dec 05 12:30:26 crc kubenswrapper[4784]: I1205 12:30:26.948019 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tvphx\" (UniqueName: \"kubernetes.io/projected/96b39f3e-b508-4f02-ae7a-d391eeca4988-kube-api-access-tvphx\") pod \"96b39f3e-b508-4f02-ae7a-d391eeca4988\" (UID: \"96b39f3e-b508-4f02-ae7a-d391eeca4988\") "
Dec 05 12:30:26 crc kubenswrapper[4784]: I1205 12:30:26.948066 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/96b39f3e-b508-4f02-ae7a-d391eeca4988-proxy-ca-bundles\") pod \"96b39f3e-b508-4f02-ae7a-d391eeca4988\" (UID: \"96b39f3e-b508-4f02-ae7a-d391eeca4988\") "
Dec 05 12:30:26 crc kubenswrapper[4784]: I1205 12:30:26.948117 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/96b39f3e-b508-4f02-ae7a-d391eeca4988-serving-cert\") pod \"96b39f3e-b508-4f02-ae7a-d391eeca4988\" (UID: \"96b39f3e-b508-4f02-ae7a-d391eeca4988\") "
Dec 05 12:30:26 crc kubenswrapper[4784]: I1205 12:30:26.949237 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/96b39f3e-b508-4f02-ae7a-d391eeca4988-client-ca" (OuterVolumeSpecName: "client-ca") pod "96b39f3e-b508-4f02-ae7a-d391eeca4988" (UID: "96b39f3e-b508-4f02-ae7a-d391eeca4988"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:30:26 crc kubenswrapper[4784]: I1205 12:30:26.949280 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/96b39f3e-b508-4f02-ae7a-d391eeca4988-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "96b39f3e-b508-4f02-ae7a-d391eeca4988" (UID: "96b39f3e-b508-4f02-ae7a-d391eeca4988"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:30:26 crc kubenswrapper[4784]: I1205 12:30:26.949426 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/96b39f3e-b508-4f02-ae7a-d391eeca4988-config" (OuterVolumeSpecName: "config") pod "96b39f3e-b508-4f02-ae7a-d391eeca4988" (UID: "96b39f3e-b508-4f02-ae7a-d391eeca4988"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:30:26 crc kubenswrapper[4784]: I1205 12:30:26.953859 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b39f3e-b508-4f02-ae7a-d391eeca4988-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "96b39f3e-b508-4f02-ae7a-d391eeca4988" (UID: "96b39f3e-b508-4f02-ae7a-d391eeca4988"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:30:26 crc kubenswrapper[4784]: I1205 12:30:26.954547 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b39f3e-b508-4f02-ae7a-d391eeca4988-kube-api-access-tvphx" (OuterVolumeSpecName: "kube-api-access-tvphx") pod "96b39f3e-b508-4f02-ae7a-d391eeca4988" (UID: "96b39f3e-b508-4f02-ae7a-d391eeca4988"). InnerVolumeSpecName "kube-api-access-tvphx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:30:26 crc kubenswrapper[4784]: I1205 12:30:26.966675 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b2lpc"
Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.049759 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1eb80025-ed6a-4509-99ca-57f7a4c9eefb-config\") pod \"1eb80025-ed6a-4509-99ca-57f7a4c9eefb\" (UID: \"1eb80025-ed6a-4509-99ca-57f7a4c9eefb\") "
Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.049816 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1eb80025-ed6a-4509-99ca-57f7a4c9eefb-serving-cert\") pod \"1eb80025-ed6a-4509-99ca-57f7a4c9eefb\" (UID: \"1eb80025-ed6a-4509-99ca-57f7a4c9eefb\") "
Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.049903 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1eb80025-ed6a-4509-99ca-57f7a4c9eefb-client-ca\") pod \"1eb80025-ed6a-4509-99ca-57f7a4c9eefb\" (UID: \"1eb80025-ed6a-4509-99ca-57f7a4c9eefb\") "
Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.049929 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xzk9j\" (UniqueName: \"kubernetes.io/projected/1eb80025-ed6a-4509-99ca-57f7a4c9eefb-kube-api-access-xzk9j\") pod \"1eb80025-ed6a-4509-99ca-57f7a4c9eefb\" (UID: \"1eb80025-ed6a-4509-99ca-57f7a4c9eefb\") "
Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.052955 4784 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/96b39f3e-b508-4f02-ae7a-d391eeca4988-config\") on node \"crc\" DevicePath \"\""
Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.052988 4784 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/96b39f3e-b508-4f02-ae7a-d391eeca4988-client-ca\") on node \"crc\" DevicePath \"\""
Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.053003 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tvphx\" (UniqueName: \"kubernetes.io/projected/96b39f3e-b508-4f02-ae7a-d391eeca4988-kube-api-access-tvphx\") on node \"crc\" DevicePath \"\""
Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.053016 4784 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/96b39f3e-b508-4f02-ae7a-d391eeca4988-proxy-ca-bundles\") on node \"crc\" DevicePath \"\""
Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.053027 4784 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/96b39f3e-b508-4f02-ae7a-d391eeca4988-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.054615 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1eb80025-ed6a-4509-99ca-57f7a4c9eefb-config" (OuterVolumeSpecName: "config") pod "1eb80025-ed6a-4509-99ca-57f7a4c9eefb" (UID: "1eb80025-ed6a-4509-99ca-57f7a4c9eefb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.054800 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1eb80025-ed6a-4509-99ca-57f7a4c9eefb-client-ca" (OuterVolumeSpecName: "client-ca") pod "1eb80025-ed6a-4509-99ca-57f7a4c9eefb" (UID: "1eb80025-ed6a-4509-99ca-57f7a4c9eefb"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.058068 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1eb80025-ed6a-4509-99ca-57f7a4c9eefb-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1eb80025-ed6a-4509-99ca-57f7a4c9eefb" (UID: "1eb80025-ed6a-4509-99ca-57f7a4c9eefb"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.058476 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1eb80025-ed6a-4509-99ca-57f7a4c9eefb-kube-api-access-xzk9j" (OuterVolumeSpecName: "kube-api-access-xzk9j") pod "1eb80025-ed6a-4509-99ca-57f7a4c9eefb" (UID: "1eb80025-ed6a-4509-99ca-57f7a4c9eefb"). InnerVolumeSpecName "kube-api-access-xzk9j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.059641 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-598cbb748b-zc5nq"]
Dec 05 12:30:27 crc kubenswrapper[4784]: E1205 12:30:27.059884 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1eb80025-ed6a-4509-99ca-57f7a4c9eefb" containerName="route-controller-manager"
Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.059910 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="1eb80025-ed6a-4509-99ca-57f7a4c9eefb" containerName="route-controller-manager"
Dec 05 12:30:27 crc kubenswrapper[4784]: E1205 12:30:27.059925 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.059932 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Dec 05 12:30:27 crc kubenswrapper[4784]: E1205 12:30:27.059943 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96b39f3e-b508-4f02-ae7a-d391eeca4988" containerName="controller-manager"
Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.059951 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="96b39f3e-b508-4f02-ae7a-d391eeca4988" containerName="controller-manager"
Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.060076 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="1eb80025-ed6a-4509-99ca-57f7a4c9eefb" containerName="route-controller-manager"
Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.060093 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.060108 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="96b39f3e-b508-4f02-ae7a-d391eeca4988" containerName="controller-manager"
Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.060594 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-598cbb748b-zc5nq"
Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.072092 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-598cbb748b-zc5nq"]
Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.076931 4784 generic.go:334] "Generic (PLEG): container finished" podID="96b39f3e-b508-4f02-ae7a-d391eeca4988" containerID="1ca2240bab3459a80b01224ca88bc88e6f780a9712287579feb51f56301de141" exitCode=0
Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.077059 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-2drbk" event={"ID":"96b39f3e-b508-4f02-ae7a-d391eeca4988","Type":"ContainerDied","Data":"1ca2240bab3459a80b01224ca88bc88e6f780a9712287579feb51f56301de141"}
Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.077111 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-2drbk" event={"ID":"96b39f3e-b508-4f02-ae7a-d391eeca4988","Type":"ContainerDied","Data":"bb3daf14eaf2b2910ebd356947fc88784d35a61d904d5a33d02b0525ba264e13"}
Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.077134 4784 scope.go:117] "RemoveContainer" containerID="1ca2240bab3459a80b01224ca88bc88e6f780a9712287579feb51f56301de141"
Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.077340 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-2drbk"
Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.080854 4784 generic.go:334] "Generic (PLEG): container finished" podID="1eb80025-ed6a-4509-99ca-57f7a4c9eefb" containerID="1d7c0360a54b382bf2157239816176812981170752723c962d30d8d657d982ef" exitCode=0
Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.080897 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b2lpc" event={"ID":"1eb80025-ed6a-4509-99ca-57f7a4c9eefb","Type":"ContainerDied","Data":"1d7c0360a54b382bf2157239816176812981170752723c962d30d8d657d982ef"}
Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.080922 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b2lpc" event={"ID":"1eb80025-ed6a-4509-99ca-57f7a4c9eefb","Type":"ContainerDied","Data":"0a401e36ff269ba3931b5640e4c6546ed34f39dd1e5f7ab44ee79f7884ddf59f"}
Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.080971 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b2lpc"
Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.101290 4784 scope.go:117] "RemoveContainer" containerID="1ca2240bab3459a80b01224ca88bc88e6f780a9712287579feb51f56301de141"
Dec 05 12:30:27 crc kubenswrapper[4784]: E1205 12:30:27.103524 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1ca2240bab3459a80b01224ca88bc88e6f780a9712287579feb51f56301de141\": container with ID starting with 1ca2240bab3459a80b01224ca88bc88e6f780a9712287579feb51f56301de141 not found: ID does not exist" containerID="1ca2240bab3459a80b01224ca88bc88e6f780a9712287579feb51f56301de141"
Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.104048 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ca2240bab3459a80b01224ca88bc88e6f780a9712287579feb51f56301de141"} err="failed to get container status \"1ca2240bab3459a80b01224ca88bc88e6f780a9712287579feb51f56301de141\": rpc error: code = NotFound desc = could not find container \"1ca2240bab3459a80b01224ca88bc88e6f780a9712287579feb51f56301de141\": container with ID starting with 1ca2240bab3459a80b01224ca88bc88e6f780a9712287579feb51f56301de141 not found: ID does not exist"
Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.104168 4784 scope.go:117] "RemoveContainer" containerID="1d7c0360a54b382bf2157239816176812981170752723c962d30d8d657d982ef"
Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.114134 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-2drbk"]
Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.119068 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-94b4d8c88-mw7ms"]
Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.120153 4784 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-94b4d8c88-mw7ms" Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.123788 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.124028 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.124305 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.124471 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.124552 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.125767 4784 scope.go:117] "RemoveContainer" containerID="1d7c0360a54b382bf2157239816176812981170752723c962d30d8d657d982ef" Dec 05 12:30:27 crc kubenswrapper[4784]: E1205 12:30:27.126479 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d7c0360a54b382bf2157239816176812981170752723c962d30d8d657d982ef\": container with ID starting with 1d7c0360a54b382bf2157239816176812981170752723c962d30d8d657d982ef not found: ID does not exist" containerID="1d7c0360a54b382bf2157239816176812981170752723c962d30d8d657d982ef" Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.126544 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d7c0360a54b382bf2157239816176812981170752723c962d30d8d657d982ef"} err="failed to get container status \"1d7c0360a54b382bf2157239816176812981170752723c962d30d8d657d982ef\": rpc error: code = NotFound desc = could not find container \"1d7c0360a54b382bf2157239816176812981170752723c962d30d8d657d982ef\": container with ID starting with 1d7c0360a54b382bf2157239816176812981170752723c962d30d8d657d982ef not found: ID does not exist" Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.126813 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.130868 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-2drbk"] Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.146704 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-b2lpc"] Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.149038 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-b2lpc"] Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.154607 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210b7bb2-9aee-42f0-8bfb-6001edf1c852-serving-cert\") pod \"controller-manager-598cbb748b-zc5nq\" (UID: \"210b7bb2-9aee-42f0-8bfb-6001edf1c852\") " pod="openshift-controller-manager/controller-manager-598cbb748b-zc5nq" Dec 05 12:30:27 crc kubenswrapper[4784]: 
I1205 12:30:27.154670 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/210b7bb2-9aee-42f0-8bfb-6001edf1c852-client-ca\") pod \"controller-manager-598cbb748b-zc5nq\" (UID: \"210b7bb2-9aee-42f0-8bfb-6001edf1c852\") " pod="openshift-controller-manager/controller-manager-598cbb748b-zc5nq" Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.154701 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210b7bb2-9aee-42f0-8bfb-6001edf1c852-config\") pod \"controller-manager-598cbb748b-zc5nq\" (UID: \"210b7bb2-9aee-42f0-8bfb-6001edf1c852\") " pod="openshift-controller-manager/controller-manager-598cbb748b-zc5nq" Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.154721 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mf6v6\" (UniqueName: \"kubernetes.io/projected/210b7bb2-9aee-42f0-8bfb-6001edf1c852-kube-api-access-mf6v6\") pod \"controller-manager-598cbb748b-zc5nq\" (UID: \"210b7bb2-9aee-42f0-8bfb-6001edf1c852\") " pod="openshift-controller-manager/controller-manager-598cbb748b-zc5nq" Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.154748 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/210b7bb2-9aee-42f0-8bfb-6001edf1c852-proxy-ca-bundles\") pod \"controller-manager-598cbb748b-zc5nq\" (UID: \"210b7bb2-9aee-42f0-8bfb-6001edf1c852\") " pod="openshift-controller-manager/controller-manager-598cbb748b-zc5nq" Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.154785 4784 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1eb80025-ed6a-4509-99ca-57f7a4c9eefb-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.154795 4784 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1eb80025-ed6a-4509-99ca-57f7a4c9eefb-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.154805 4784 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1eb80025-ed6a-4509-99ca-57f7a4c9eefb-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.154813 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xzk9j\" (UniqueName: \"kubernetes.io/projected/1eb80025-ed6a-4509-99ca-57f7a4c9eefb-kube-api-access-xzk9j\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.158395 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-94b4d8c88-mw7ms"] Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.256220 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210b7bb2-9aee-42f0-8bfb-6001edf1c852-config\") pod \"controller-manager-598cbb748b-zc5nq\" (UID: \"210b7bb2-9aee-42f0-8bfb-6001edf1c852\") " pod="openshift-controller-manager/controller-manager-598cbb748b-zc5nq" Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.256303 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mf6v6\" 
(UniqueName: \"kubernetes.io/projected/210b7bb2-9aee-42f0-8bfb-6001edf1c852-kube-api-access-mf6v6\") pod \"controller-manager-598cbb748b-zc5nq\" (UID: \"210b7bb2-9aee-42f0-8bfb-6001edf1c852\") " pod="openshift-controller-manager/controller-manager-598cbb748b-zc5nq" Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.256354 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/210b7bb2-9aee-42f0-8bfb-6001edf1c852-proxy-ca-bundles\") pod \"controller-manager-598cbb748b-zc5nq\" (UID: \"210b7bb2-9aee-42f0-8bfb-6001edf1c852\") " pod="openshift-controller-manager/controller-manager-598cbb748b-zc5nq" Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.256392 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9765e425-f320-404c-aff3-7a532e373c0e-config\") pod \"route-controller-manager-94b4d8c88-mw7ms\" (UID: \"9765e425-f320-404c-aff3-7a532e373c0e\") " pod="openshift-route-controller-manager/route-controller-manager-94b4d8c88-mw7ms" Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.256421 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9765e425-f320-404c-aff3-7a532e373c0e-client-ca\") pod \"route-controller-manager-94b4d8c88-mw7ms\" (UID: \"9765e425-f320-404c-aff3-7a532e373c0e\") " pod="openshift-route-controller-manager/route-controller-manager-94b4d8c88-mw7ms" Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.256453 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wb2pw\" (UniqueName: \"kubernetes.io/projected/9765e425-f320-404c-aff3-7a532e373c0e-kube-api-access-wb2pw\") pod \"route-controller-manager-94b4d8c88-mw7ms\" (UID: \"9765e425-f320-404c-aff3-7a532e373c0e\") " pod="openshift-route-controller-manager/route-controller-manager-94b4d8c88-mw7ms" Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.256498 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210b7bb2-9aee-42f0-8bfb-6001edf1c852-serving-cert\") pod \"controller-manager-598cbb748b-zc5nq\" (UID: \"210b7bb2-9aee-42f0-8bfb-6001edf1c852\") " pod="openshift-controller-manager/controller-manager-598cbb748b-zc5nq" Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.256535 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/210b7bb2-9aee-42f0-8bfb-6001edf1c852-client-ca\") pod \"controller-manager-598cbb748b-zc5nq\" (UID: \"210b7bb2-9aee-42f0-8bfb-6001edf1c852\") " pod="openshift-controller-manager/controller-manager-598cbb748b-zc5nq" Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.256557 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9765e425-f320-404c-aff3-7a532e373c0e-serving-cert\") pod \"route-controller-manager-94b4d8c88-mw7ms\" (UID: \"9765e425-f320-404c-aff3-7a532e373c0e\") " pod="openshift-route-controller-manager/route-controller-manager-94b4d8c88-mw7ms" Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.258169 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/210b7bb2-9aee-42f0-8bfb-6001edf1c852-config\") pod \"controller-manager-598cbb748b-zc5nq\" (UID: \"210b7bb2-9aee-42f0-8bfb-6001edf1c852\") " pod="openshift-controller-manager/controller-manager-598cbb748b-zc5nq" Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.258434 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/210b7bb2-9aee-42f0-8bfb-6001edf1c852-proxy-ca-bundles\") pod \"controller-manager-598cbb748b-zc5nq\" (UID: \"210b7bb2-9aee-42f0-8bfb-6001edf1c852\") " pod="openshift-controller-manager/controller-manager-598cbb748b-zc5nq" Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.258622 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/210b7bb2-9aee-42f0-8bfb-6001edf1c852-client-ca\") pod \"controller-manager-598cbb748b-zc5nq\" (UID: \"210b7bb2-9aee-42f0-8bfb-6001edf1c852\") " pod="openshift-controller-manager/controller-manager-598cbb748b-zc5nq" Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.262033 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210b7bb2-9aee-42f0-8bfb-6001edf1c852-serving-cert\") pod \"controller-manager-598cbb748b-zc5nq\" (UID: \"210b7bb2-9aee-42f0-8bfb-6001edf1c852\") " pod="openshift-controller-manager/controller-manager-598cbb748b-zc5nq" Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.273303 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mf6v6\" (UniqueName: \"kubernetes.io/projected/210b7bb2-9aee-42f0-8bfb-6001edf1c852-kube-api-access-mf6v6\") pod \"controller-manager-598cbb748b-zc5nq\" (UID: \"210b7bb2-9aee-42f0-8bfb-6001edf1c852\") " pod="openshift-controller-manager/controller-manager-598cbb748b-zc5nq" Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.357785 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9765e425-f320-404c-aff3-7a532e373c0e-config\") pod \"route-controller-manager-94b4d8c88-mw7ms\" (UID: \"9765e425-f320-404c-aff3-7a532e373c0e\") " pod="openshift-route-controller-manager/route-controller-manager-94b4d8c88-mw7ms" Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.357864 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9765e425-f320-404c-aff3-7a532e373c0e-client-ca\") pod \"route-controller-manager-94b4d8c88-mw7ms\" (UID: \"9765e425-f320-404c-aff3-7a532e373c0e\") " pod="openshift-route-controller-manager/route-controller-manager-94b4d8c88-mw7ms" Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.357911 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wb2pw\" (UniqueName: \"kubernetes.io/projected/9765e425-f320-404c-aff3-7a532e373c0e-kube-api-access-wb2pw\") pod \"route-controller-manager-94b4d8c88-mw7ms\" (UID: \"9765e425-f320-404c-aff3-7a532e373c0e\") " pod="openshift-route-controller-manager/route-controller-manager-94b4d8c88-mw7ms" Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.357965 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9765e425-f320-404c-aff3-7a532e373c0e-serving-cert\") pod \"route-controller-manager-94b4d8c88-mw7ms\" (UID: \"9765e425-f320-404c-aff3-7a532e373c0e\") " 
pod="openshift-route-controller-manager/route-controller-manager-94b4d8c88-mw7ms" Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.358990 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9765e425-f320-404c-aff3-7a532e373c0e-client-ca\") pod \"route-controller-manager-94b4d8c88-mw7ms\" (UID: \"9765e425-f320-404c-aff3-7a532e373c0e\") " pod="openshift-route-controller-manager/route-controller-manager-94b4d8c88-mw7ms" Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.359206 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9765e425-f320-404c-aff3-7a532e373c0e-config\") pod \"route-controller-manager-94b4d8c88-mw7ms\" (UID: \"9765e425-f320-404c-aff3-7a532e373c0e\") " pod="openshift-route-controller-manager/route-controller-manager-94b4d8c88-mw7ms" Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.361413 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9765e425-f320-404c-aff3-7a532e373c0e-serving-cert\") pod \"route-controller-manager-94b4d8c88-mw7ms\" (UID: \"9765e425-f320-404c-aff3-7a532e373c0e\") " pod="openshift-route-controller-manager/route-controller-manager-94b4d8c88-mw7ms" Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.374753 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wb2pw\" (UniqueName: \"kubernetes.io/projected/9765e425-f320-404c-aff3-7a532e373c0e-kube-api-access-wb2pw\") pod \"route-controller-manager-94b4d8c88-mw7ms\" (UID: \"9765e425-f320-404c-aff3-7a532e373c0e\") " pod="openshift-route-controller-manager/route-controller-manager-94b4d8c88-mw7ms" Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.385607 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-598cbb748b-zc5nq" Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.436583 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-94b4d8c88-mw7ms" Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.576722 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-598cbb748b-zc5nq"] Dec 05 12:30:27 crc kubenswrapper[4784]: W1205 12:30:27.581947 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod210b7bb2_9aee_42f0_8bfb_6001edf1c852.slice/crio-0e28cfd381b876aa316bb9271946b9473f539650a2ec39c75ead051b0a353496 WatchSource:0}: Error finding container 0e28cfd381b876aa316bb9271946b9473f539650a2ec39c75ead051b0a353496: Status 404 returned error can't find the container with id 0e28cfd381b876aa316bb9271946b9473f539650a2ec39c75ead051b0a353496 Dec 05 12:30:27 crc kubenswrapper[4784]: I1205 12:30:27.643044 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-94b4d8c88-mw7ms"] Dec 05 12:30:27 crc kubenswrapper[4784]: W1205 12:30:27.653326 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9765e425_f320_404c_aff3_7a532e373c0e.slice/crio-7de51b1471d9c74197a0272ffd47ce3a1124675e4ed2f56d9dd84053b5537832 WatchSource:0}: Error finding container 7de51b1471d9c74197a0272ffd47ce3a1124675e4ed2f56d9dd84053b5537832: Status 404 returned error can't find the container with id 7de51b1471d9c74197a0272ffd47ce3a1124675e4ed2f56d9dd84053b5537832 Dec 05 12:30:28 crc kubenswrapper[4784]: I1205 12:30:28.088892 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-94b4d8c88-mw7ms" event={"ID":"9765e425-f320-404c-aff3-7a532e373c0e","Type":"ContainerStarted","Data":"cbe62a97499235b3e09f5dde8108ce890b4ade0f9b571f4d16892748a8171616"} Dec 05 12:30:28 crc kubenswrapper[4784]: I1205 12:30:28.089705 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-94b4d8c88-mw7ms" event={"ID":"9765e425-f320-404c-aff3-7a532e373c0e","Type":"ContainerStarted","Data":"7de51b1471d9c74197a0272ffd47ce3a1124675e4ed2f56d9dd84053b5537832"} Dec 05 12:30:28 crc kubenswrapper[4784]: I1205 12:30:28.089733 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-94b4d8c88-mw7ms" Dec 05 12:30:28 crc kubenswrapper[4784]: I1205 12:30:28.091881 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-598cbb748b-zc5nq" event={"ID":"210b7bb2-9aee-42f0-8bfb-6001edf1c852","Type":"ContainerStarted","Data":"e87fe77b5296046809c31e8830defec7b994d3b328a1e20661884a18258b536f"} Dec 05 12:30:28 crc kubenswrapper[4784]: I1205 12:30:28.091924 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-598cbb748b-zc5nq" event={"ID":"210b7bb2-9aee-42f0-8bfb-6001edf1c852","Type":"ContainerStarted","Data":"0e28cfd381b876aa316bb9271946b9473f539650a2ec39c75ead051b0a353496"} Dec 05 12:30:28 crc kubenswrapper[4784]: I1205 12:30:28.092087 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-598cbb748b-zc5nq" Dec 05 12:30:28 crc kubenswrapper[4784]: I1205 12:30:28.099087 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-controller-manager/controller-manager-598cbb748b-zc5nq" Dec 05 12:30:28 crc kubenswrapper[4784]: I1205 12:30:28.115056 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-94b4d8c88-mw7ms" Dec 05 12:30:28 crc kubenswrapper[4784]: I1205 12:30:28.132716 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-94b4d8c88-mw7ms" podStartSLOduration=1.132699283 podStartE2EDuration="1.132699283s" podCreationTimestamp="2025-12-05 12:30:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:30:28.110148517 +0000 UTC m=+307.530215352" watchObservedRunningTime="2025-12-05 12:30:28.132699283 +0000 UTC m=+307.552766098" Dec 05 12:30:28 crc kubenswrapper[4784]: I1205 12:30:28.133839 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-598cbb748b-zc5nq" podStartSLOduration=1.13383528 podStartE2EDuration="1.13383528s" podCreationTimestamp="2025-12-05 12:30:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:30:28.13076561 +0000 UTC m=+307.550832425" watchObservedRunningTime="2025-12-05 12:30:28.13383528 +0000 UTC m=+307.553902085" Dec 05 12:30:29 crc kubenswrapper[4784]: I1205 12:30:29.004478 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1eb80025-ed6a-4509-99ca-57f7a4c9eefb" path="/var/lib/kubelet/pods/1eb80025-ed6a-4509-99ca-57f7a4c9eefb/volumes" Dec 05 12:30:29 crc kubenswrapper[4784]: I1205 12:30:29.005108 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b39f3e-b508-4f02-ae7a-d391eeca4988" path="/var/lib/kubelet/pods/96b39f3e-b508-4f02-ae7a-d391eeca4988/volumes" Dec 05 12:30:34 crc kubenswrapper[4784]: I1205 12:30:34.560085 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" podUID="ec9447bc-e76f-4943-9f80-f4d121ff1322" containerName="registry" containerID="cri-o://0f4e37afc3a14cf3a142b9d7f3016a0ec674af1890c074cac9741b898b703233" gracePeriod=30 Dec 05 12:30:34 crc kubenswrapper[4784]: I1205 12:30:34.956666 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:30:35 crc kubenswrapper[4784]: I1205 12:30:35.067382 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ec9447bc-e76f-4943-9f80-f4d121ff1322-bound-sa-token\") pod \"ec9447bc-e76f-4943-9f80-f4d121ff1322\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " Dec 05 12:30:35 crc kubenswrapper[4784]: I1205 12:30:35.067573 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"ec9447bc-e76f-4943-9f80-f4d121ff1322\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " Dec 05 12:30:35 crc kubenswrapper[4784]: I1205 12:30:35.067611 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ec9447bc-e76f-4943-9f80-f4d121ff1322-registry-certificates\") pod \"ec9447bc-e76f-4943-9f80-f4d121ff1322\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " Dec 05 12:30:35 crc kubenswrapper[4784]: I1205 12:30:35.067627 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ec9447bc-e76f-4943-9f80-f4d121ff1322-trusted-ca\") pod \"ec9447bc-e76f-4943-9f80-f4d121ff1322\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " Dec 05 12:30:35 crc kubenswrapper[4784]: I1205 12:30:35.067665 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ec9447bc-e76f-4943-9f80-f4d121ff1322-ca-trust-extracted\") pod \"ec9447bc-e76f-4943-9f80-f4d121ff1322\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " Dec 05 12:30:35 crc kubenswrapper[4784]: I1205 12:30:35.067714 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ec9447bc-e76f-4943-9f80-f4d121ff1322-installation-pull-secrets\") pod \"ec9447bc-e76f-4943-9f80-f4d121ff1322\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " Dec 05 12:30:35 crc kubenswrapper[4784]: I1205 12:30:35.067765 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ec9447bc-e76f-4943-9f80-f4d121ff1322-registry-tls\") pod \"ec9447bc-e76f-4943-9f80-f4d121ff1322\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " Dec 05 12:30:35 crc kubenswrapper[4784]: I1205 12:30:35.067812 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jfv4q\" (UniqueName: \"kubernetes.io/projected/ec9447bc-e76f-4943-9f80-f4d121ff1322-kube-api-access-jfv4q\") pod \"ec9447bc-e76f-4943-9f80-f4d121ff1322\" (UID: \"ec9447bc-e76f-4943-9f80-f4d121ff1322\") " Dec 05 12:30:35 crc kubenswrapper[4784]: I1205 12:30:35.068764 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ec9447bc-e76f-4943-9f80-f4d121ff1322-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "ec9447bc-e76f-4943-9f80-f4d121ff1322" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:30:35 crc kubenswrapper[4784]: I1205 12:30:35.068805 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ec9447bc-e76f-4943-9f80-f4d121ff1322-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "ec9447bc-e76f-4943-9f80-f4d121ff1322" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:30:35 crc kubenswrapper[4784]: I1205 12:30:35.082921 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec9447bc-e76f-4943-9f80-f4d121ff1322-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "ec9447bc-e76f-4943-9f80-f4d121ff1322" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:30:35 crc kubenswrapper[4784]: I1205 12:30:35.083432 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec9447bc-e76f-4943-9f80-f4d121ff1322-kube-api-access-jfv4q" (OuterVolumeSpecName: "kube-api-access-jfv4q") pod "ec9447bc-e76f-4943-9f80-f4d121ff1322" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322"). InnerVolumeSpecName "kube-api-access-jfv4q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:30:35 crc kubenswrapper[4784]: I1205 12:30:35.083818 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec9447bc-e76f-4943-9f80-f4d121ff1322-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "ec9447bc-e76f-4943-9f80-f4d121ff1322" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:30:35 crc kubenswrapper[4784]: I1205 12:30:35.085885 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ec9447bc-e76f-4943-9f80-f4d121ff1322-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "ec9447bc-e76f-4943-9f80-f4d121ff1322" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:30:35 crc kubenswrapper[4784]: I1205 12:30:35.086092 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec9447bc-e76f-4943-9f80-f4d121ff1322-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "ec9447bc-e76f-4943-9f80-f4d121ff1322" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:30:35 crc kubenswrapper[4784]: I1205 12:30:35.106764 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "ec9447bc-e76f-4943-9f80-f4d121ff1322" (UID: "ec9447bc-e76f-4943-9f80-f4d121ff1322"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 05 12:30:35 crc kubenswrapper[4784]: I1205 12:30:35.127548 4784 generic.go:334] "Generic (PLEG): container finished" podID="ec9447bc-e76f-4943-9f80-f4d121ff1322" containerID="0f4e37afc3a14cf3a142b9d7f3016a0ec674af1890c074cac9741b898b703233" exitCode=0 Dec 05 12:30:35 crc kubenswrapper[4784]: I1205 12:30:35.127595 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" Dec 05 12:30:35 crc kubenswrapper[4784]: I1205 12:30:35.127602 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" event={"ID":"ec9447bc-e76f-4943-9f80-f4d121ff1322","Type":"ContainerDied","Data":"0f4e37afc3a14cf3a142b9d7f3016a0ec674af1890c074cac9741b898b703233"} Dec 05 12:30:35 crc kubenswrapper[4784]: I1205 12:30:35.127633 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-zjhhs" event={"ID":"ec9447bc-e76f-4943-9f80-f4d121ff1322","Type":"ContainerDied","Data":"fa2a62ee2ef217b79e7e8af7cbe33c6596068dcd54315ff5f040c752e1577bdf"} Dec 05 12:30:35 crc kubenswrapper[4784]: I1205 12:30:35.127673 4784 scope.go:117] "RemoveContainer" containerID="0f4e37afc3a14cf3a142b9d7f3016a0ec674af1890c074cac9741b898b703233" Dec 05 12:30:35 crc kubenswrapper[4784]: I1205 12:30:35.149415 4784 scope.go:117] "RemoveContainer" containerID="0f4e37afc3a14cf3a142b9d7f3016a0ec674af1890c074cac9741b898b703233" Dec 05 12:30:35 crc kubenswrapper[4784]: E1205 12:30:35.149996 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f4e37afc3a14cf3a142b9d7f3016a0ec674af1890c074cac9741b898b703233\": container with ID starting with 0f4e37afc3a14cf3a142b9d7f3016a0ec674af1890c074cac9741b898b703233 not found: ID does not exist" containerID="0f4e37afc3a14cf3a142b9d7f3016a0ec674af1890c074cac9741b898b703233" Dec 05 12:30:35 crc kubenswrapper[4784]: I1205 12:30:35.150035 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f4e37afc3a14cf3a142b9d7f3016a0ec674af1890c074cac9741b898b703233"} err="failed to get container status \"0f4e37afc3a14cf3a142b9d7f3016a0ec674af1890c074cac9741b898b703233\": rpc error: code = NotFound desc = could not find container \"0f4e37afc3a14cf3a142b9d7f3016a0ec674af1890c074cac9741b898b703233\": container with ID starting with 0f4e37afc3a14cf3a142b9d7f3016a0ec674af1890c074cac9741b898b703233 not found: ID does not exist" Dec 05 12:30:35 crc kubenswrapper[4784]: I1205 12:30:35.169022 4784 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ec9447bc-e76f-4943-9f80-f4d121ff1322-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:35 crc kubenswrapper[4784]: I1205 12:30:35.169068 4784 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ec9447bc-e76f-4943-9f80-f4d121ff1322-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:35 crc kubenswrapper[4784]: I1205 12:30:35.169083 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jfv4q\" (UniqueName: \"kubernetes.io/projected/ec9447bc-e76f-4943-9f80-f4d121ff1322-kube-api-access-jfv4q\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:35 crc kubenswrapper[4784]: I1205 12:30:35.169095 4784 reconciler_common.go:293] "Volume detached for volume 
\"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ec9447bc-e76f-4943-9f80-f4d121ff1322-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:35 crc kubenswrapper[4784]: I1205 12:30:35.169107 4784 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ec9447bc-e76f-4943-9f80-f4d121ff1322-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:35 crc kubenswrapper[4784]: I1205 12:30:35.169117 4784 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ec9447bc-e76f-4943-9f80-f4d121ff1322-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:35 crc kubenswrapper[4784]: I1205 12:30:35.169127 4784 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ec9447bc-e76f-4943-9f80-f4d121ff1322-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:35 crc kubenswrapper[4784]: I1205 12:30:35.169156 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-zjhhs"] Dec 05 12:30:35 crc kubenswrapper[4784]: I1205 12:30:35.173372 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-zjhhs"] Dec 05 12:30:35 crc kubenswrapper[4784]: I1205 12:30:35.176837 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 05 12:30:36 crc kubenswrapper[4784]: I1205 12:30:36.330595 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-hg45x"] Dec 05 12:30:36 crc kubenswrapper[4784]: E1205 12:30:36.330794 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec9447bc-e76f-4943-9f80-f4d121ff1322" containerName="registry" Dec 05 12:30:36 crc kubenswrapper[4784]: I1205 12:30:36.330806 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec9447bc-e76f-4943-9f80-f4d121ff1322" containerName="registry" Dec 05 12:30:36 crc kubenswrapper[4784]: I1205 12:30:36.330897 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec9447bc-e76f-4943-9f80-f4d121ff1322" containerName="registry" Dec 05 12:30:36 crc kubenswrapper[4784]: I1205 12:30:36.331548 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hg45x" Dec 05 12:30:36 crc kubenswrapper[4784]: I1205 12:30:36.334147 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 05 12:30:36 crc kubenswrapper[4784]: I1205 12:30:36.354820 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hg45x"] Dec 05 12:30:36 crc kubenswrapper[4784]: I1205 12:30:36.482089 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/24a95ae6-0c84-4572-bfc4-5acb5295577c-catalog-content\") pod \"redhat-marketplace-hg45x\" (UID: \"24a95ae6-0c84-4572-bfc4-5acb5295577c\") " pod="openshift-marketplace/redhat-marketplace-hg45x" Dec 05 12:30:36 crc kubenswrapper[4784]: I1205 12:30:36.482177 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4kkxh\" (UniqueName: \"kubernetes.io/projected/24a95ae6-0c84-4572-bfc4-5acb5295577c-kube-api-access-4kkxh\") pod \"redhat-marketplace-hg45x\" (UID: \"24a95ae6-0c84-4572-bfc4-5acb5295577c\") " pod="openshift-marketplace/redhat-marketplace-hg45x" Dec 05 12:30:36 crc kubenswrapper[4784]: I1205 12:30:36.482279 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/24a95ae6-0c84-4572-bfc4-5acb5295577c-utilities\") pod \"redhat-marketplace-hg45x\" (UID: \"24a95ae6-0c84-4572-bfc4-5acb5295577c\") " pod="openshift-marketplace/redhat-marketplace-hg45x" Dec 05 12:30:36 crc kubenswrapper[4784]: I1205 12:30:36.526138 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-6trnd"] Dec 05 12:30:36 crc kubenswrapper[4784]: I1205 12:30:36.527665 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6trnd" Dec 05 12:30:36 crc kubenswrapper[4784]: I1205 12:30:36.529606 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 05 12:30:36 crc kubenswrapper[4784]: I1205 12:30:36.538572 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6trnd"] Dec 05 12:30:36 crc kubenswrapper[4784]: I1205 12:30:36.584032 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4kkxh\" (UniqueName: \"kubernetes.io/projected/24a95ae6-0c84-4572-bfc4-5acb5295577c-kube-api-access-4kkxh\") pod \"redhat-marketplace-hg45x\" (UID: \"24a95ae6-0c84-4572-bfc4-5acb5295577c\") " pod="openshift-marketplace/redhat-marketplace-hg45x" Dec 05 12:30:36 crc kubenswrapper[4784]: I1205 12:30:36.584102 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/24a95ae6-0c84-4572-bfc4-5acb5295577c-utilities\") pod \"redhat-marketplace-hg45x\" (UID: \"24a95ae6-0c84-4572-bfc4-5acb5295577c\") " pod="openshift-marketplace/redhat-marketplace-hg45x" Dec 05 12:30:36 crc kubenswrapper[4784]: I1205 12:30:36.584153 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/24a95ae6-0c84-4572-bfc4-5acb5295577c-catalog-content\") pod \"redhat-marketplace-hg45x\" (UID: \"24a95ae6-0c84-4572-bfc4-5acb5295577c\") " pod="openshift-marketplace/redhat-marketplace-hg45x" Dec 05 12:30:36 crc kubenswrapper[4784]: I1205 12:30:36.584673 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/24a95ae6-0c84-4572-bfc4-5acb5295577c-catalog-content\") pod \"redhat-marketplace-hg45x\" (UID: \"24a95ae6-0c84-4572-bfc4-5acb5295577c\") " pod="openshift-marketplace/redhat-marketplace-hg45x" Dec 05 12:30:36 crc kubenswrapper[4784]: I1205 12:30:36.584718 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/24a95ae6-0c84-4572-bfc4-5acb5295577c-utilities\") pod \"redhat-marketplace-hg45x\" (UID: \"24a95ae6-0c84-4572-bfc4-5acb5295577c\") " pod="openshift-marketplace/redhat-marketplace-hg45x" Dec 05 12:30:36 crc kubenswrapper[4784]: I1205 12:30:36.601427 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4kkxh\" (UniqueName: \"kubernetes.io/projected/24a95ae6-0c84-4572-bfc4-5acb5295577c-kube-api-access-4kkxh\") pod \"redhat-marketplace-hg45x\" (UID: \"24a95ae6-0c84-4572-bfc4-5acb5295577c\") " pod="openshift-marketplace/redhat-marketplace-hg45x" Dec 05 12:30:36 crc kubenswrapper[4784]: I1205 12:30:36.660489 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hg45x" Dec 05 12:30:36 crc kubenswrapper[4784]: I1205 12:30:36.685403 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xrgt6\" (UniqueName: \"kubernetes.io/projected/55e564fa-612a-4e0b-bc29-09e5384fe16c-kube-api-access-xrgt6\") pod \"community-operators-6trnd\" (UID: \"55e564fa-612a-4e0b-bc29-09e5384fe16c\") " pod="openshift-marketplace/community-operators-6trnd" Dec 05 12:30:36 crc kubenswrapper[4784]: I1205 12:30:36.685449 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55e564fa-612a-4e0b-bc29-09e5384fe16c-catalog-content\") pod \"community-operators-6trnd\" (UID: \"55e564fa-612a-4e0b-bc29-09e5384fe16c\") " pod="openshift-marketplace/community-operators-6trnd" Dec 05 12:30:36 crc kubenswrapper[4784]: I1205 12:30:36.685525 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55e564fa-612a-4e0b-bc29-09e5384fe16c-utilities\") pod \"community-operators-6trnd\" (UID: \"55e564fa-612a-4e0b-bc29-09e5384fe16c\") " pod="openshift-marketplace/community-operators-6trnd" Dec 05 12:30:36 crc kubenswrapper[4784]: I1205 12:30:36.787057 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55e564fa-612a-4e0b-bc29-09e5384fe16c-utilities\") pod \"community-operators-6trnd\" (UID: \"55e564fa-612a-4e0b-bc29-09e5384fe16c\") " pod="openshift-marketplace/community-operators-6trnd" Dec 05 12:30:36 crc kubenswrapper[4784]: I1205 12:30:36.787107 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xrgt6\" (UniqueName: \"kubernetes.io/projected/55e564fa-612a-4e0b-bc29-09e5384fe16c-kube-api-access-xrgt6\") pod \"community-operators-6trnd\" (UID: \"55e564fa-612a-4e0b-bc29-09e5384fe16c\") " pod="openshift-marketplace/community-operators-6trnd" Dec 05 12:30:36 crc kubenswrapper[4784]: I1205 12:30:36.787138 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55e564fa-612a-4e0b-bc29-09e5384fe16c-catalog-content\") pod \"community-operators-6trnd\" (UID: \"55e564fa-612a-4e0b-bc29-09e5384fe16c\") " pod="openshift-marketplace/community-operators-6trnd" Dec 05 12:30:36 crc kubenswrapper[4784]: I1205 12:30:36.787896 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55e564fa-612a-4e0b-bc29-09e5384fe16c-utilities\") pod \"community-operators-6trnd\" (UID: \"55e564fa-612a-4e0b-bc29-09e5384fe16c\") " pod="openshift-marketplace/community-operators-6trnd" Dec 05 12:30:36 crc kubenswrapper[4784]: I1205 12:30:36.787809 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55e564fa-612a-4e0b-bc29-09e5384fe16c-catalog-content\") pod \"community-operators-6trnd\" (UID: \"55e564fa-612a-4e0b-bc29-09e5384fe16c\") " pod="openshift-marketplace/community-operators-6trnd" Dec 05 12:30:36 crc kubenswrapper[4784]: I1205 12:30:36.811397 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xrgt6\" (UniqueName: \"kubernetes.io/projected/55e564fa-612a-4e0b-bc29-09e5384fe16c-kube-api-access-xrgt6\") pod 
\"community-operators-6trnd\" (UID: \"55e564fa-612a-4e0b-bc29-09e5384fe16c\") " pod="openshift-marketplace/community-operators-6trnd" Dec 05 12:30:36 crc kubenswrapper[4784]: I1205 12:30:36.845139 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6trnd" Dec 05 12:30:37 crc kubenswrapper[4784]: I1205 12:30:37.007201 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec9447bc-e76f-4943-9f80-f4d121ff1322" path="/var/lib/kubelet/pods/ec9447bc-e76f-4943-9f80-f4d121ff1322/volumes" Dec 05 12:30:37 crc kubenswrapper[4784]: I1205 12:30:37.067647 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hg45x"] Dec 05 12:30:37 crc kubenswrapper[4784]: I1205 12:30:37.140279 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hg45x" event={"ID":"24a95ae6-0c84-4572-bfc4-5acb5295577c","Type":"ContainerStarted","Data":"5dd8a3f0bcf38ba47216ba74fed7801aa1bfb4752c89b1c3ee4d42abf416efd8"} Dec 05 12:30:37 crc kubenswrapper[4784]: I1205 12:30:37.251871 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6trnd"] Dec 05 12:30:37 crc kubenswrapper[4784]: W1205 12:30:37.262606 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod55e564fa_612a_4e0b_bc29_09e5384fe16c.slice/crio-c430cacbfd4e8f007f5de12ad9638b642a65f33983ae52c4a9d8ddeca36e526b WatchSource:0}: Error finding container c430cacbfd4e8f007f5de12ad9638b642a65f33983ae52c4a9d8ddeca36e526b: Status 404 returned error can't find the container with id c430cacbfd4e8f007f5de12ad9638b642a65f33983ae52c4a9d8ddeca36e526b Dec 05 12:30:38 crc kubenswrapper[4784]: I1205 12:30:38.146472 4784 generic.go:334] "Generic (PLEG): container finished" podID="24a95ae6-0c84-4572-bfc4-5acb5295577c" containerID="727534b75594295800de5368b04e3ccf6a79d09ec58b0c36d5a80ee1febe8af8" exitCode=0 Dec 05 12:30:38 crc kubenswrapper[4784]: I1205 12:30:38.146558 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hg45x" event={"ID":"24a95ae6-0c84-4572-bfc4-5acb5295577c","Type":"ContainerDied","Data":"727534b75594295800de5368b04e3ccf6a79d09ec58b0c36d5a80ee1febe8af8"} Dec 05 12:30:38 crc kubenswrapper[4784]: I1205 12:30:38.148737 4784 generic.go:334] "Generic (PLEG): container finished" podID="55e564fa-612a-4e0b-bc29-09e5384fe16c" containerID="2aeca0d7346a339e9393d0165ad5fdcfaf1e287cca62bbf8975e5d2ea0103a13" exitCode=0 Dec 05 12:30:38 crc kubenswrapper[4784]: I1205 12:30:38.148763 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6trnd" event={"ID":"55e564fa-612a-4e0b-bc29-09e5384fe16c","Type":"ContainerDied","Data":"2aeca0d7346a339e9393d0165ad5fdcfaf1e287cca62bbf8975e5d2ea0103a13"} Dec 05 12:30:38 crc kubenswrapper[4784]: I1205 12:30:38.148781 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6trnd" event={"ID":"55e564fa-612a-4e0b-bc29-09e5384fe16c","Type":"ContainerStarted","Data":"c430cacbfd4e8f007f5de12ad9638b642a65f33983ae52c4a9d8ddeca36e526b"} Dec 05 12:30:38 crc kubenswrapper[4784]: I1205 12:30:38.926843 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-xlw8f"] Dec 05 12:30:38 crc kubenswrapper[4784]: I1205 12:30:38.928084 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-xlw8f" Dec 05 12:30:38 crc kubenswrapper[4784]: I1205 12:30:38.929679 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 05 12:30:38 crc kubenswrapper[4784]: I1205 12:30:38.946808 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xlw8f"] Dec 05 12:30:39 crc kubenswrapper[4784]: I1205 12:30:39.015025 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d7h6c\" (UniqueName: \"kubernetes.io/projected/564caa58-786b-44bd-96a5-963c2e8343f7-kube-api-access-d7h6c\") pod \"redhat-operators-xlw8f\" (UID: \"564caa58-786b-44bd-96a5-963c2e8343f7\") " pod="openshift-marketplace/redhat-operators-xlw8f" Dec 05 12:30:39 crc kubenswrapper[4784]: I1205 12:30:39.015102 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/564caa58-786b-44bd-96a5-963c2e8343f7-catalog-content\") pod \"redhat-operators-xlw8f\" (UID: \"564caa58-786b-44bd-96a5-963c2e8343f7\") " pod="openshift-marketplace/redhat-operators-xlw8f" Dec 05 12:30:39 crc kubenswrapper[4784]: I1205 12:30:39.015123 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/564caa58-786b-44bd-96a5-963c2e8343f7-utilities\") pod \"redhat-operators-xlw8f\" (UID: \"564caa58-786b-44bd-96a5-963c2e8343f7\") " pod="openshift-marketplace/redhat-operators-xlw8f" Dec 05 12:30:39 crc kubenswrapper[4784]: I1205 12:30:39.116207 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/564caa58-786b-44bd-96a5-963c2e8343f7-catalog-content\") pod \"redhat-operators-xlw8f\" (UID: \"564caa58-786b-44bd-96a5-963c2e8343f7\") " pod="openshift-marketplace/redhat-operators-xlw8f" Dec 05 12:30:39 crc kubenswrapper[4784]: I1205 12:30:39.116266 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/564caa58-786b-44bd-96a5-963c2e8343f7-utilities\") pod \"redhat-operators-xlw8f\" (UID: \"564caa58-786b-44bd-96a5-963c2e8343f7\") " pod="openshift-marketplace/redhat-operators-xlw8f" Dec 05 12:30:39 crc kubenswrapper[4784]: I1205 12:30:39.116339 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d7h6c\" (UniqueName: \"kubernetes.io/projected/564caa58-786b-44bd-96a5-963c2e8343f7-kube-api-access-d7h6c\") pod \"redhat-operators-xlw8f\" (UID: \"564caa58-786b-44bd-96a5-963c2e8343f7\") " pod="openshift-marketplace/redhat-operators-xlw8f" Dec 05 12:30:39 crc kubenswrapper[4784]: I1205 12:30:39.117157 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/564caa58-786b-44bd-96a5-963c2e8343f7-catalog-content\") pod \"redhat-operators-xlw8f\" (UID: \"564caa58-786b-44bd-96a5-963c2e8343f7\") " pod="openshift-marketplace/redhat-operators-xlw8f" Dec 05 12:30:39 crc kubenswrapper[4784]: I1205 12:30:39.117432 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/564caa58-786b-44bd-96a5-963c2e8343f7-utilities\") pod \"redhat-operators-xlw8f\" (UID: \"564caa58-786b-44bd-96a5-963c2e8343f7\") " 
pod="openshift-marketplace/redhat-operators-xlw8f" Dec 05 12:30:39 crc kubenswrapper[4784]: I1205 12:30:39.129522 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-pbmwm"] Dec 05 12:30:39 crc kubenswrapper[4784]: I1205 12:30:39.130527 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pbmwm" Dec 05 12:30:39 crc kubenswrapper[4784]: I1205 12:30:39.132139 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 05 12:30:39 crc kubenswrapper[4784]: I1205 12:30:39.140066 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d7h6c\" (UniqueName: \"kubernetes.io/projected/564caa58-786b-44bd-96a5-963c2e8343f7-kube-api-access-d7h6c\") pod \"redhat-operators-xlw8f\" (UID: \"564caa58-786b-44bd-96a5-963c2e8343f7\") " pod="openshift-marketplace/redhat-operators-xlw8f" Dec 05 12:30:39 crc kubenswrapper[4784]: I1205 12:30:39.153533 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pbmwm"] Dec 05 12:30:39 crc kubenswrapper[4784]: I1205 12:30:39.166468 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6trnd" event={"ID":"55e564fa-612a-4e0b-bc29-09e5384fe16c","Type":"ContainerStarted","Data":"e04671fe60558cdd8e7102610dc13b7ec2d5055425d35d362126bb6b86aa3fb5"} Dec 05 12:30:39 crc kubenswrapper[4784]: I1205 12:30:39.180478 4784 generic.go:334] "Generic (PLEG): container finished" podID="24a95ae6-0c84-4572-bfc4-5acb5295577c" containerID="835da0f8ba03b66c7a86fd45b6aae80ebcc593d0096b0fcaf124287e8a2bef1b" exitCode=0 Dec 05 12:30:39 crc kubenswrapper[4784]: I1205 12:30:39.180543 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hg45x" event={"ID":"24a95ae6-0c84-4572-bfc4-5acb5295577c","Type":"ContainerDied","Data":"835da0f8ba03b66c7a86fd45b6aae80ebcc593d0096b0fcaf124287e8a2bef1b"} Dec 05 12:30:39 crc kubenswrapper[4784]: I1205 12:30:39.217733 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7ab4604a-3a19-4d0b-b6a0-b8d7274df317-utilities\") pod \"certified-operators-pbmwm\" (UID: \"7ab4604a-3a19-4d0b-b6a0-b8d7274df317\") " pod="openshift-marketplace/certified-operators-pbmwm" Dec 05 12:30:39 crc kubenswrapper[4784]: I1205 12:30:39.217828 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7ab4604a-3a19-4d0b-b6a0-b8d7274df317-catalog-content\") pod \"certified-operators-pbmwm\" (UID: \"7ab4604a-3a19-4d0b-b6a0-b8d7274df317\") " pod="openshift-marketplace/certified-operators-pbmwm" Dec 05 12:30:39 crc kubenswrapper[4784]: I1205 12:30:39.218020 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r4g5d\" (UniqueName: \"kubernetes.io/projected/7ab4604a-3a19-4d0b-b6a0-b8d7274df317-kube-api-access-r4g5d\") pod \"certified-operators-pbmwm\" (UID: \"7ab4604a-3a19-4d0b-b6a0-b8d7274df317\") " pod="openshift-marketplace/certified-operators-pbmwm" Dec 05 12:30:39 crc kubenswrapper[4784]: I1205 12:30:39.245013 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-xlw8f" Dec 05 12:30:39 crc kubenswrapper[4784]: I1205 12:30:39.319806 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7ab4604a-3a19-4d0b-b6a0-b8d7274df317-utilities\") pod \"certified-operators-pbmwm\" (UID: \"7ab4604a-3a19-4d0b-b6a0-b8d7274df317\") " pod="openshift-marketplace/certified-operators-pbmwm" Dec 05 12:30:39 crc kubenswrapper[4784]: I1205 12:30:39.319872 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7ab4604a-3a19-4d0b-b6a0-b8d7274df317-catalog-content\") pod \"certified-operators-pbmwm\" (UID: \"7ab4604a-3a19-4d0b-b6a0-b8d7274df317\") " pod="openshift-marketplace/certified-operators-pbmwm" Dec 05 12:30:39 crc kubenswrapper[4784]: I1205 12:30:39.319889 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r4g5d\" (UniqueName: \"kubernetes.io/projected/7ab4604a-3a19-4d0b-b6a0-b8d7274df317-kube-api-access-r4g5d\") pod \"certified-operators-pbmwm\" (UID: \"7ab4604a-3a19-4d0b-b6a0-b8d7274df317\") " pod="openshift-marketplace/certified-operators-pbmwm" Dec 05 12:30:39 crc kubenswrapper[4784]: I1205 12:30:39.320278 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7ab4604a-3a19-4d0b-b6a0-b8d7274df317-utilities\") pod \"certified-operators-pbmwm\" (UID: \"7ab4604a-3a19-4d0b-b6a0-b8d7274df317\") " pod="openshift-marketplace/certified-operators-pbmwm" Dec 05 12:30:39 crc kubenswrapper[4784]: I1205 12:30:39.320680 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7ab4604a-3a19-4d0b-b6a0-b8d7274df317-catalog-content\") pod \"certified-operators-pbmwm\" (UID: \"7ab4604a-3a19-4d0b-b6a0-b8d7274df317\") " pod="openshift-marketplace/certified-operators-pbmwm" Dec 05 12:30:39 crc kubenswrapper[4784]: I1205 12:30:39.341316 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r4g5d\" (UniqueName: \"kubernetes.io/projected/7ab4604a-3a19-4d0b-b6a0-b8d7274df317-kube-api-access-r4g5d\") pod \"certified-operators-pbmwm\" (UID: \"7ab4604a-3a19-4d0b-b6a0-b8d7274df317\") " pod="openshift-marketplace/certified-operators-pbmwm" Dec 05 12:30:39 crc kubenswrapper[4784]: I1205 12:30:39.478018 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-pbmwm" Dec 05 12:30:39 crc kubenswrapper[4784]: I1205 12:30:39.657833 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xlw8f"] Dec 05 12:30:39 crc kubenswrapper[4784]: W1205 12:30:39.662975 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod564caa58_786b_44bd_96a5_963c2e8343f7.slice/crio-b9d5d5e93f3a22dd5f92131858bc8dc034518149c8e5b4780fcef088d1055a5d WatchSource:0}: Error finding container b9d5d5e93f3a22dd5f92131858bc8dc034518149c8e5b4780fcef088d1055a5d: Status 404 returned error can't find the container with id b9d5d5e93f3a22dd5f92131858bc8dc034518149c8e5b4780fcef088d1055a5d Dec 05 12:30:39 crc kubenswrapper[4784]: I1205 12:30:39.858698 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pbmwm"] Dec 05 12:30:40 crc kubenswrapper[4784]: I1205 12:30:40.188594 4784 generic.go:334] "Generic (PLEG): container finished" podID="55e564fa-612a-4e0b-bc29-09e5384fe16c" containerID="e04671fe60558cdd8e7102610dc13b7ec2d5055425d35d362126bb6b86aa3fb5" exitCode=0 Dec 05 12:30:40 crc kubenswrapper[4784]: I1205 12:30:40.188693 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6trnd" event={"ID":"55e564fa-612a-4e0b-bc29-09e5384fe16c","Type":"ContainerDied","Data":"e04671fe60558cdd8e7102610dc13b7ec2d5055425d35d362126bb6b86aa3fb5"} Dec 05 12:30:40 crc kubenswrapper[4784]: I1205 12:30:40.189938 4784 generic.go:334] "Generic (PLEG): container finished" podID="564caa58-786b-44bd-96a5-963c2e8343f7" containerID="a698769d8e6a3ec183b428d818c1d261f4440d2fb4c7c8fdf6bc9bed5862c2b5" exitCode=0 Dec 05 12:30:40 crc kubenswrapper[4784]: I1205 12:30:40.190123 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xlw8f" event={"ID":"564caa58-786b-44bd-96a5-963c2e8343f7","Type":"ContainerDied","Data":"a698769d8e6a3ec183b428d818c1d261f4440d2fb4c7c8fdf6bc9bed5862c2b5"} Dec 05 12:30:40 crc kubenswrapper[4784]: I1205 12:30:40.190514 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xlw8f" event={"ID":"564caa58-786b-44bd-96a5-963c2e8343f7","Type":"ContainerStarted","Data":"b9d5d5e93f3a22dd5f92131858bc8dc034518149c8e5b4780fcef088d1055a5d"} Dec 05 12:30:40 crc kubenswrapper[4784]: I1205 12:30:40.193940 4784 generic.go:334] "Generic (PLEG): container finished" podID="7ab4604a-3a19-4d0b-b6a0-b8d7274df317" containerID="c42b15268af73cd2929671e7feaca2e6f56695e55b855f0a275e4194ecaec121" exitCode=0 Dec 05 12:30:40 crc kubenswrapper[4784]: I1205 12:30:40.194090 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pbmwm" event={"ID":"7ab4604a-3a19-4d0b-b6a0-b8d7274df317","Type":"ContainerDied","Data":"c42b15268af73cd2929671e7feaca2e6f56695e55b855f0a275e4194ecaec121"} Dec 05 12:30:40 crc kubenswrapper[4784]: I1205 12:30:40.195101 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pbmwm" event={"ID":"7ab4604a-3a19-4d0b-b6a0-b8d7274df317","Type":"ContainerStarted","Data":"ff8ca8809f2edfa48dee26cc29ea9b7220210e641231f3212949717cc32b87c9"} Dec 05 12:30:40 crc kubenswrapper[4784]: I1205 12:30:40.203934 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hg45x"
event={"ID":"24a95ae6-0c84-4572-bfc4-5acb5295577c","Type":"ContainerStarted","Data":"4a87a6b8219d0beca809fb67382989c5ef8d8b14a0c067c3ac8c96024f9a5d56"} Dec 05 12:30:40 crc kubenswrapper[4784]: I1205 12:30:40.233139 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-hg45x" podStartSLOduration=2.455809194 podStartE2EDuration="4.233117596s" podCreationTimestamp="2025-12-05 12:30:36 +0000 UTC" firstStartedPulling="2025-12-05 12:30:38.147660617 +0000 UTC m=+317.567727432" lastFinishedPulling="2025-12-05 12:30:39.924969009 +0000 UTC m=+319.345035834" observedRunningTime="2025-12-05 12:30:40.231557905 +0000 UTC m=+319.651624720" watchObservedRunningTime="2025-12-05 12:30:40.233117596 +0000 UTC m=+319.653184411" Dec 05 12:30:41 crc kubenswrapper[4784]: I1205 12:30:41.214410 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6trnd" event={"ID":"55e564fa-612a-4e0b-bc29-09e5384fe16c","Type":"ContainerStarted","Data":"9dd35a65d4d1937df8893de7ed83a9799386c7f970263112ca02ec2688fdeeb9"} Dec 05 12:30:41 crc kubenswrapper[4784]: I1205 12:30:41.218291 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xlw8f" event={"ID":"564caa58-786b-44bd-96a5-963c2e8343f7","Type":"ContainerStarted","Data":"4faa20cc18727758472b0c2f5695ae9b167bf4c1b02596f2242a476c815b662a"} Dec 05 12:30:41 crc kubenswrapper[4784]: I1205 12:30:41.224268 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pbmwm" event={"ID":"7ab4604a-3a19-4d0b-b6a0-b8d7274df317","Type":"ContainerStarted","Data":"5061bfba4ede197a19d44dfb08655acb65211878c76f5a4e2e41136acdeca6ef"} Dec 05 12:30:41 crc kubenswrapper[4784]: I1205 12:30:41.262381 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-6trnd" podStartSLOduration=2.690775056 podStartE2EDuration="5.262358852s" podCreationTimestamp="2025-12-05 12:30:36 +0000 UTC" firstStartedPulling="2025-12-05 12:30:38.149907291 +0000 UTC m=+317.569974096" lastFinishedPulling="2025-12-05 12:30:40.721491077 +0000 UTC m=+320.141557892" observedRunningTime="2025-12-05 12:30:41.236743985 +0000 UTC m=+320.656810820" watchObservedRunningTime="2025-12-05 12:30:41.262358852 +0000 UTC m=+320.682425667" Dec 05 12:30:42 crc kubenswrapper[4784]: I1205 12:30:42.247447 4784 generic.go:334] "Generic (PLEG): container finished" podID="564caa58-786b-44bd-96a5-963c2e8343f7" containerID="4faa20cc18727758472b0c2f5695ae9b167bf4c1b02596f2242a476c815b662a" exitCode=0 Dec 05 12:30:42 crc kubenswrapper[4784]: I1205 12:30:42.247520 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xlw8f" event={"ID":"564caa58-786b-44bd-96a5-963c2e8343f7","Type":"ContainerDied","Data":"4faa20cc18727758472b0c2f5695ae9b167bf4c1b02596f2242a476c815b662a"} Dec 05 12:30:42 crc kubenswrapper[4784]: I1205 12:30:42.251164 4784 generic.go:334] "Generic (PLEG): container finished" podID="7ab4604a-3a19-4d0b-b6a0-b8d7274df317" containerID="5061bfba4ede197a19d44dfb08655acb65211878c76f5a4e2e41136acdeca6ef" exitCode=0 Dec 05 12:30:42 crc kubenswrapper[4784]: I1205 12:30:42.251260 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pbmwm" event={"ID":"7ab4604a-3a19-4d0b-b6a0-b8d7274df317","Type":"ContainerDied","Data":"5061bfba4ede197a19d44dfb08655acb65211878c76f5a4e2e41136acdeca6ef"} Dec 05 12:30:43 crc 
kubenswrapper[4784]: I1205 12:30:43.256747 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 05 12:30:43 crc kubenswrapper[4784]: I1205 12:30:43.260799 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xlw8f" event={"ID":"564caa58-786b-44bd-96a5-963c2e8343f7","Type":"ContainerStarted","Data":"2bbb0d22914c08c412df62b6896a67cc243022152a97a39a58fa8a5b9b60f287"} Dec 05 12:30:43 crc kubenswrapper[4784]: I1205 12:30:43.264051 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pbmwm" event={"ID":"7ab4604a-3a19-4d0b-b6a0-b8d7274df317","Type":"ContainerStarted","Data":"b5a616d362445bba749e76956293d3bb7f786209514bad6767bb1b1b625f5072"} Dec 05 12:30:43 crc kubenswrapper[4784]: I1205 12:30:43.355473 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-xlw8f" podStartSLOduration=2.796903688 podStartE2EDuration="5.355454361s" podCreationTimestamp="2025-12-05 12:30:38 +0000 UTC" firstStartedPulling="2025-12-05 12:30:40.192587173 +0000 UTC m=+319.612653988" lastFinishedPulling="2025-12-05 12:30:42.751137846 +0000 UTC m=+322.171204661" observedRunningTime="2025-12-05 12:30:43.35418191 +0000 UTC m=+322.774248725" watchObservedRunningTime="2025-12-05 12:30:43.355454361 +0000 UTC m=+322.775521176" Dec 05 12:30:43 crc kubenswrapper[4784]: I1205 12:30:43.375425 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-pbmwm" podStartSLOduration=1.92992455 podStartE2EDuration="4.375405392s" podCreationTimestamp="2025-12-05 12:30:39 +0000 UTC" firstStartedPulling="2025-12-05 12:30:40.195509309 +0000 UTC m=+319.615576124" lastFinishedPulling="2025-12-05 12:30:42.640990151 +0000 UTC m=+322.061056966" observedRunningTime="2025-12-05 12:30:43.372473797 +0000 UTC m=+322.792540612" watchObservedRunningTime="2025-12-05 12:30:43.375405392 +0000 UTC m=+322.795472207" Dec 05 12:30:46 crc kubenswrapper[4784]: I1205 12:30:46.338533 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 05 12:30:46 crc kubenswrapper[4784]: I1205 12:30:46.525680 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-598cbb748b-zc5nq"] Dec 05 12:30:46 crc kubenswrapper[4784]: I1205 12:30:46.525939 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-598cbb748b-zc5nq" podUID="210b7bb2-9aee-42f0-8bfb-6001edf1c852" containerName="controller-manager" containerID="cri-o://e87fe77b5296046809c31e8830defec7b994d3b328a1e20661884a18258b536f" gracePeriod=30 Dec 05 12:30:46 crc kubenswrapper[4784]: I1205 12:30:46.535829 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-94b4d8c88-mw7ms"] Dec 05 12:30:46 crc kubenswrapper[4784]: I1205 12:30:46.536257 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-94b4d8c88-mw7ms" podUID="9765e425-f320-404c-aff3-7a532e373c0e" containerName="route-controller-manager" containerID="cri-o://cbe62a97499235b3e09f5dde8108ce890b4ade0f9b571f4d16892748a8171616" gracePeriod=30 Dec 05 12:30:46 crc kubenswrapper[4784]: I1205 12:30:46.661205 4784 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-hg45x" Dec 05 12:30:46 crc kubenswrapper[4784]: I1205 12:30:46.661260 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-hg45x" Dec 05 12:30:46 crc kubenswrapper[4784]: I1205 12:30:46.706944 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-hg45x" Dec 05 12:30:46 crc kubenswrapper[4784]: I1205 12:30:46.846489 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-6trnd" Dec 05 12:30:46 crc kubenswrapper[4784]: I1205 12:30:46.846897 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-6trnd" Dec 05 12:30:46 crc kubenswrapper[4784]: I1205 12:30:46.884154 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-6trnd" Dec 05 12:30:47 crc kubenswrapper[4784]: I1205 12:30:47.325849 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-6trnd" Dec 05 12:30:47 crc kubenswrapper[4784]: I1205 12:30:47.326332 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-hg45x" Dec 05 12:30:47 crc kubenswrapper[4784]: I1205 12:30:47.386614 4784 patch_prober.go:28] interesting pod/controller-manager-598cbb748b-zc5nq container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.61:8443/healthz\": dial tcp 10.217.0.61:8443: connect: connection refused" start-of-body= Dec 05 12:30:47 crc kubenswrapper[4784]: I1205 12:30:47.386664 4784 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-598cbb748b-zc5nq" podUID="210b7bb2-9aee-42f0-8bfb-6001edf1c852" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.61:8443/healthz\": dial tcp 10.217.0.61:8443: connect: connection refused" Dec 05 12:30:47 crc kubenswrapper[4784]: I1205 12:30:47.438243 4784 patch_prober.go:28] interesting pod/route-controller-manager-94b4d8c88-mw7ms container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.62:8443/healthz\": dial tcp 10.217.0.62:8443: connect: connection refused" start-of-body= Dec 05 12:30:47 crc kubenswrapper[4784]: I1205 12:30:47.438306 4784 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-94b4d8c88-mw7ms" podUID="9765e425-f320-404c-aff3-7a532e373c0e" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.62:8443/healthz\": dial tcp 10.217.0.62:8443: connect: connection refused" Dec 05 12:30:49 crc kubenswrapper[4784]: I1205 12:30:49.245363 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-xlw8f" Dec 05 12:30:49 crc kubenswrapper[4784]: I1205 12:30:49.245418 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-xlw8f" Dec 05 12:30:49 crc kubenswrapper[4784]: I1205 12:30:49.285486 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-xlw8f" Dec 
05 12:30:49 crc kubenswrapper[4784]: I1205 12:30:49.337529 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-xlw8f" Dec 05 12:30:49 crc kubenswrapper[4784]: I1205 12:30:49.479290 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-pbmwm" Dec 05 12:30:49 crc kubenswrapper[4784]: I1205 12:30:49.479665 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-pbmwm" Dec 05 12:30:49 crc kubenswrapper[4784]: I1205 12:30:49.522118 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-pbmwm" Dec 05 12:30:50 crc kubenswrapper[4784]: I1205 12:30:50.347502 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-pbmwm" Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.046827 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-route-controller-manager_route-controller-manager-94b4d8c88-mw7ms_9765e425-f320-404c-aff3-7a532e373c0e/route-controller-manager/0.log" Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.047066 4784 generic.go:334] "Generic (PLEG): container finished" podID="9765e425-f320-404c-aff3-7a532e373c0e" containerID="cbe62a97499235b3e09f5dde8108ce890b4ade0f9b571f4d16892748a8171616" exitCode=-1 Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.047101 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-94b4d8c88-mw7ms" event={"ID":"9765e425-f320-404c-aff3-7a532e373c0e","Type":"ContainerDied","Data":"cbe62a97499235b3e09f5dde8108ce890b4ade0f9b571f4d16892748a8171616"} Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.048970 4784 generic.go:334] "Generic (PLEG): container finished" podID="210b7bb2-9aee-42f0-8bfb-6001edf1c852" containerID="e87fe77b5296046809c31e8830defec7b994d3b328a1e20661884a18258b536f" exitCode=0 Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.049001 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-598cbb748b-zc5nq" event={"ID":"210b7bb2-9aee-42f0-8bfb-6001edf1c852","Type":"ContainerDied","Data":"e87fe77b5296046809c31e8830defec7b994d3b328a1e20661884a18258b536f"} Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.597505 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-94b4d8c88-mw7ms" Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.630494 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-65c8945786-kv2q4"] Dec 05 12:30:55 crc kubenswrapper[4784]: E1205 12:30:55.630724 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9765e425-f320-404c-aff3-7a532e373c0e" containerName="route-controller-manager" Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.630738 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="9765e425-f320-404c-aff3-7a532e373c0e" containerName="route-controller-manager" Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.630839 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="9765e425-f320-404c-aff3-7a532e373c0e" containerName="route-controller-manager" Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.631554 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-65c8945786-kv2q4" Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.632391 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9765e425-f320-404c-aff3-7a532e373c0e-config\") pod \"9765e425-f320-404c-aff3-7a532e373c0e\" (UID: \"9765e425-f320-404c-aff3-7a532e373c0e\") " Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.632474 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wb2pw\" (UniqueName: \"kubernetes.io/projected/9765e425-f320-404c-aff3-7a532e373c0e-kube-api-access-wb2pw\") pod \"9765e425-f320-404c-aff3-7a532e373c0e\" (UID: \"9765e425-f320-404c-aff3-7a532e373c0e\") " Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.632534 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9765e425-f320-404c-aff3-7a532e373c0e-client-ca\") pod \"9765e425-f320-404c-aff3-7a532e373c0e\" (UID: \"9765e425-f320-404c-aff3-7a532e373c0e\") " Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.632573 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9765e425-f320-404c-aff3-7a532e373c0e-serving-cert\") pod \"9765e425-f320-404c-aff3-7a532e373c0e\" (UID: \"9765e425-f320-404c-aff3-7a532e373c0e\") " Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.633687 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9765e425-f320-404c-aff3-7a532e373c0e-config" (OuterVolumeSpecName: "config") pod "9765e425-f320-404c-aff3-7a532e373c0e" (UID: "9765e425-f320-404c-aff3-7a532e373c0e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.634160 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9765e425-f320-404c-aff3-7a532e373c0e-client-ca" (OuterVolumeSpecName: "client-ca") pod "9765e425-f320-404c-aff3-7a532e373c0e" (UID: "9765e425-f320-404c-aff3-7a532e373c0e"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.640961 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9765e425-f320-404c-aff3-7a532e373c0e-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9765e425-f320-404c-aff3-7a532e373c0e" (UID: "9765e425-f320-404c-aff3-7a532e373c0e"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.646457 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9765e425-f320-404c-aff3-7a532e373c0e-kube-api-access-wb2pw" (OuterVolumeSpecName: "kube-api-access-wb2pw") pod "9765e425-f320-404c-aff3-7a532e373c0e" (UID: "9765e425-f320-404c-aff3-7a532e373c0e"). InnerVolumeSpecName "kube-api-access-wb2pw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.655384 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-65c8945786-kv2q4"] Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.704267 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-598cbb748b-zc5nq" Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.733547 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210b7bb2-9aee-42f0-8bfb-6001edf1c852-serving-cert\") pod \"210b7bb2-9aee-42f0-8bfb-6001edf1c852\" (UID: \"210b7bb2-9aee-42f0-8bfb-6001edf1c852\") " Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.733664 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mf6v6\" (UniqueName: \"kubernetes.io/projected/210b7bb2-9aee-42f0-8bfb-6001edf1c852-kube-api-access-mf6v6\") pod \"210b7bb2-9aee-42f0-8bfb-6001edf1c852\" (UID: \"210b7bb2-9aee-42f0-8bfb-6001edf1c852\") " Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.733740 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/210b7bb2-9aee-42f0-8bfb-6001edf1c852-proxy-ca-bundles\") pod \"210b7bb2-9aee-42f0-8bfb-6001edf1c852\" (UID: \"210b7bb2-9aee-42f0-8bfb-6001edf1c852\") " Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.733768 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210b7bb2-9aee-42f0-8bfb-6001edf1c852-config\") pod \"210b7bb2-9aee-42f0-8bfb-6001edf1c852\" (UID: \"210b7bb2-9aee-42f0-8bfb-6001edf1c852\") " Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.733797 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/210b7bb2-9aee-42f0-8bfb-6001edf1c852-client-ca\") pod \"210b7bb2-9aee-42f0-8bfb-6001edf1c852\" (UID: \"210b7bb2-9aee-42f0-8bfb-6001edf1c852\") " Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.733989 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/af67d3eb-5894-4c69-85dc-a01e544a814c-config\") pod \"route-controller-manager-65c8945786-kv2q4\" (UID: \"af67d3eb-5894-4c69-85dc-a01e544a814c\") " 
pod="openshift-route-controller-manager/route-controller-manager-65c8945786-kv2q4" Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.734058 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/af67d3eb-5894-4c69-85dc-a01e544a814c-serving-cert\") pod \"route-controller-manager-65c8945786-kv2q4\" (UID: \"af67d3eb-5894-4c69-85dc-a01e544a814c\") " pod="openshift-route-controller-manager/route-controller-manager-65c8945786-kv2q4" Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.734085 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/af67d3eb-5894-4c69-85dc-a01e544a814c-client-ca\") pod \"route-controller-manager-65c8945786-kv2q4\" (UID: \"af67d3eb-5894-4c69-85dc-a01e544a814c\") " pod="openshift-route-controller-manager/route-controller-manager-65c8945786-kv2q4" Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.734116 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gdr52\" (UniqueName: \"kubernetes.io/projected/af67d3eb-5894-4c69-85dc-a01e544a814c-kube-api-access-gdr52\") pod \"route-controller-manager-65c8945786-kv2q4\" (UID: \"af67d3eb-5894-4c69-85dc-a01e544a814c\") " pod="openshift-route-controller-manager/route-controller-manager-65c8945786-kv2q4" Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.734179 4784 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9765e425-f320-404c-aff3-7a532e373c0e-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.734212 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wb2pw\" (UniqueName: \"kubernetes.io/projected/9765e425-f320-404c-aff3-7a532e373c0e-kube-api-access-wb2pw\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.734225 4784 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9765e425-f320-404c-aff3-7a532e373c0e-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.734237 4784 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9765e425-f320-404c-aff3-7a532e373c0e-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.735007 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210b7bb2-9aee-42f0-8bfb-6001edf1c852-client-ca" (OuterVolumeSpecName: "client-ca") pod "210b7bb2-9aee-42f0-8bfb-6001edf1c852" (UID: "210b7bb2-9aee-42f0-8bfb-6001edf1c852"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.735136 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210b7bb2-9aee-42f0-8bfb-6001edf1c852-config" (OuterVolumeSpecName: "config") pod "210b7bb2-9aee-42f0-8bfb-6001edf1c852" (UID: "210b7bb2-9aee-42f0-8bfb-6001edf1c852"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.735408 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210b7bb2-9aee-42f0-8bfb-6001edf1c852-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "210b7bb2-9aee-42f0-8bfb-6001edf1c852" (UID: "210b7bb2-9aee-42f0-8bfb-6001edf1c852"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.740385 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210b7bb2-9aee-42f0-8bfb-6001edf1c852-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210b7bb2-9aee-42f0-8bfb-6001edf1c852" (UID: "210b7bb2-9aee-42f0-8bfb-6001edf1c852"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.740555 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210b7bb2-9aee-42f0-8bfb-6001edf1c852-kube-api-access-mf6v6" (OuterVolumeSpecName: "kube-api-access-mf6v6") pod "210b7bb2-9aee-42f0-8bfb-6001edf1c852" (UID: "210b7bb2-9aee-42f0-8bfb-6001edf1c852"). InnerVolumeSpecName "kube-api-access-mf6v6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.835515 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/af67d3eb-5894-4c69-85dc-a01e544a814c-config\") pod \"route-controller-manager-65c8945786-kv2q4\" (UID: \"af67d3eb-5894-4c69-85dc-a01e544a814c\") " pod="openshift-route-controller-manager/route-controller-manager-65c8945786-kv2q4" Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.835651 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/af67d3eb-5894-4c69-85dc-a01e544a814c-serving-cert\") pod \"route-controller-manager-65c8945786-kv2q4\" (UID: \"af67d3eb-5894-4c69-85dc-a01e544a814c\") " pod="openshift-route-controller-manager/route-controller-manager-65c8945786-kv2q4" Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.835686 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/af67d3eb-5894-4c69-85dc-a01e544a814c-client-ca\") pod \"route-controller-manager-65c8945786-kv2q4\" (UID: \"af67d3eb-5894-4c69-85dc-a01e544a814c\") " pod="openshift-route-controller-manager/route-controller-manager-65c8945786-kv2q4" Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.835714 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gdr52\" (UniqueName: \"kubernetes.io/projected/af67d3eb-5894-4c69-85dc-a01e544a814c-kube-api-access-gdr52\") pod \"route-controller-manager-65c8945786-kv2q4\" (UID: \"af67d3eb-5894-4c69-85dc-a01e544a814c\") " pod="openshift-route-controller-manager/route-controller-manager-65c8945786-kv2q4" Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.835772 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mf6v6\" (UniqueName: \"kubernetes.io/projected/210b7bb2-9aee-42f0-8bfb-6001edf1c852-kube-api-access-mf6v6\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.835787 4784 reconciler_common.go:293] "Volume detached for volume 
\"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/210b7bb2-9aee-42f0-8bfb-6001edf1c852-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.835799 4784 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210b7bb2-9aee-42f0-8bfb-6001edf1c852-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.835811 4784 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/210b7bb2-9aee-42f0-8bfb-6001edf1c852-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.835823 4784 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210b7bb2-9aee-42f0-8bfb-6001edf1c852-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.837406 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/af67d3eb-5894-4c69-85dc-a01e544a814c-client-ca\") pod \"route-controller-manager-65c8945786-kv2q4\" (UID: \"af67d3eb-5894-4c69-85dc-a01e544a814c\") " pod="openshift-route-controller-manager/route-controller-manager-65c8945786-kv2q4" Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.837741 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/af67d3eb-5894-4c69-85dc-a01e544a814c-config\") pod \"route-controller-manager-65c8945786-kv2q4\" (UID: \"af67d3eb-5894-4c69-85dc-a01e544a814c\") " pod="openshift-route-controller-manager/route-controller-manager-65c8945786-kv2q4" Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.840471 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/af67d3eb-5894-4c69-85dc-a01e544a814c-serving-cert\") pod \"route-controller-manager-65c8945786-kv2q4\" (UID: \"af67d3eb-5894-4c69-85dc-a01e544a814c\") " pod="openshift-route-controller-manager/route-controller-manager-65c8945786-kv2q4" Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.853299 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gdr52\" (UniqueName: \"kubernetes.io/projected/af67d3eb-5894-4c69-85dc-a01e544a814c-kube-api-access-gdr52\") pod \"route-controller-manager-65c8945786-kv2q4\" (UID: \"af67d3eb-5894-4c69-85dc-a01e544a814c\") " pod="openshift-route-controller-manager/route-controller-manager-65c8945786-kv2q4" Dec 05 12:30:55 crc kubenswrapper[4784]: I1205 12:30:55.998237 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-65c8945786-kv2q4" Dec 05 12:30:56 crc kubenswrapper[4784]: I1205 12:30:56.057591 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-598cbb748b-zc5nq" Dec 05 12:30:56 crc kubenswrapper[4784]: I1205 12:30:56.057590 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-598cbb748b-zc5nq" event={"ID":"210b7bb2-9aee-42f0-8bfb-6001edf1c852","Type":"ContainerDied","Data":"0e28cfd381b876aa316bb9271946b9473f539650a2ec39c75ead051b0a353496"} Dec 05 12:30:56 crc kubenswrapper[4784]: I1205 12:30:56.057748 4784 scope.go:117] "RemoveContainer" containerID="e87fe77b5296046809c31e8830defec7b994d3b328a1e20661884a18258b536f" Dec 05 12:30:56 crc kubenswrapper[4784]: I1205 12:30:56.061240 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-94b4d8c88-mw7ms" Dec 05 12:30:56 crc kubenswrapper[4784]: I1205 12:30:56.061290 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-94b4d8c88-mw7ms" event={"ID":"9765e425-f320-404c-aff3-7a532e373c0e","Type":"ContainerDied","Data":"7de51b1471d9c74197a0272ffd47ce3a1124675e4ed2f56d9dd84053b5537832"} Dec 05 12:30:56 crc kubenswrapper[4784]: I1205 12:30:56.073623 4784 scope.go:117] "RemoveContainer" containerID="cbe62a97499235b3e09f5dde8108ce890b4ade0f9b571f4d16892748a8171616" Dec 05 12:30:56 crc kubenswrapper[4784]: I1205 12:30:56.102789 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-598cbb748b-zc5nq"] Dec 05 12:30:56 crc kubenswrapper[4784]: I1205 12:30:56.106449 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-598cbb748b-zc5nq"] Dec 05 12:30:56 crc kubenswrapper[4784]: I1205 12:30:56.116529 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-94b4d8c88-mw7ms"] Dec 05 12:30:56 crc kubenswrapper[4784]: I1205 12:30:56.118936 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-94b4d8c88-mw7ms"] Dec 05 12:30:56 crc kubenswrapper[4784]: I1205 12:30:56.450214 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-65c8945786-kv2q4"] Dec 05 12:30:56 crc kubenswrapper[4784]: W1205 12:30:56.452509 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaf67d3eb_5894_4c69_85dc_a01e544a814c.slice/crio-b32011a68942149082ff84d75db49df9ff5991740a80002bbf916e22c5a95b4c WatchSource:0}: Error finding container b32011a68942149082ff84d75db49df9ff5991740a80002bbf916e22c5a95b4c: Status 404 returned error can't find the container with id b32011a68942149082ff84d75db49df9ff5991740a80002bbf916e22c5a95b4c Dec 05 12:30:57 crc kubenswrapper[4784]: I1205 12:30:57.005047 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210b7bb2-9aee-42f0-8bfb-6001edf1c852" path="/var/lib/kubelet/pods/210b7bb2-9aee-42f0-8bfb-6001edf1c852/volumes" Dec 05 12:30:57 crc kubenswrapper[4784]: I1205 12:30:57.005881 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9765e425-f320-404c-aff3-7a532e373c0e" path="/var/lib/kubelet/pods/9765e425-f320-404c-aff3-7a532e373c0e/volumes" Dec 05 12:30:57 crc kubenswrapper[4784]: I1205 12:30:57.068029 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-route-controller-manager/route-controller-manager-65c8945786-kv2q4" event={"ID":"af67d3eb-5894-4c69-85dc-a01e544a814c","Type":"ContainerStarted","Data":"b32011a68942149082ff84d75db49df9ff5991740a80002bbf916e22c5a95b4c"} Dec 05 12:30:57 crc kubenswrapper[4784]: I1205 12:30:57.773171 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7775848c7d-2knd7"] Dec 05 12:30:57 crc kubenswrapper[4784]: E1205 12:30:57.773416 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="210b7bb2-9aee-42f0-8bfb-6001edf1c852" containerName="controller-manager" Dec 05 12:30:57 crc kubenswrapper[4784]: I1205 12:30:57.773431 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="210b7bb2-9aee-42f0-8bfb-6001edf1c852" containerName="controller-manager" Dec 05 12:30:57 crc kubenswrapper[4784]: I1205 12:30:57.773566 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="210b7bb2-9aee-42f0-8bfb-6001edf1c852" containerName="controller-manager" Dec 05 12:30:57 crc kubenswrapper[4784]: I1205 12:30:57.773950 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7775848c7d-2knd7" Dec 05 12:30:57 crc kubenswrapper[4784]: I1205 12:30:57.776045 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 05 12:30:57 crc kubenswrapper[4784]: I1205 12:30:57.776550 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 05 12:30:57 crc kubenswrapper[4784]: I1205 12:30:57.776589 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 05 12:30:57 crc kubenswrapper[4784]: I1205 12:30:57.776604 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 05 12:30:57 crc kubenswrapper[4784]: I1205 12:30:57.777208 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 05 12:30:57 crc kubenswrapper[4784]: I1205 12:30:57.778891 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 05 12:30:57 crc kubenswrapper[4784]: I1205 12:30:57.785777 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 05 12:30:57 crc kubenswrapper[4784]: I1205 12:30:57.794249 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7775848c7d-2knd7"] Dec 05 12:30:57 crc kubenswrapper[4784]: I1205 12:30:57.859931 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1936fffc-bedb-4603-8d78-612a6b5ab791-config\") pod \"controller-manager-7775848c7d-2knd7\" (UID: \"1936fffc-bedb-4603-8d78-612a6b5ab791\") " pod="openshift-controller-manager/controller-manager-7775848c7d-2knd7" Dec 05 12:30:57 crc kubenswrapper[4784]: I1205 12:30:57.860108 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1936fffc-bedb-4603-8d78-612a6b5ab791-serving-cert\") pod \"controller-manager-7775848c7d-2knd7\" (UID: \"1936fffc-bedb-4603-8d78-612a6b5ab791\") " 
pod="openshift-controller-manager/controller-manager-7775848c7d-2knd7" Dec 05 12:30:57 crc kubenswrapper[4784]: I1205 12:30:57.860151 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1936fffc-bedb-4603-8d78-612a6b5ab791-proxy-ca-bundles\") pod \"controller-manager-7775848c7d-2knd7\" (UID: \"1936fffc-bedb-4603-8d78-612a6b5ab791\") " pod="openshift-controller-manager/controller-manager-7775848c7d-2knd7" Dec 05 12:30:57 crc kubenswrapper[4784]: I1205 12:30:57.860323 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1936fffc-bedb-4603-8d78-612a6b5ab791-client-ca\") pod \"controller-manager-7775848c7d-2knd7\" (UID: \"1936fffc-bedb-4603-8d78-612a6b5ab791\") " pod="openshift-controller-manager/controller-manager-7775848c7d-2knd7" Dec 05 12:30:57 crc kubenswrapper[4784]: I1205 12:30:57.860372 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vn4h6\" (UniqueName: \"kubernetes.io/projected/1936fffc-bedb-4603-8d78-612a6b5ab791-kube-api-access-vn4h6\") pod \"controller-manager-7775848c7d-2knd7\" (UID: \"1936fffc-bedb-4603-8d78-612a6b5ab791\") " pod="openshift-controller-manager/controller-manager-7775848c7d-2knd7" Dec 05 12:30:57 crc kubenswrapper[4784]: I1205 12:30:57.961761 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1936fffc-bedb-4603-8d78-612a6b5ab791-serving-cert\") pod \"controller-manager-7775848c7d-2knd7\" (UID: \"1936fffc-bedb-4603-8d78-612a6b5ab791\") " pod="openshift-controller-manager/controller-manager-7775848c7d-2knd7" Dec 05 12:30:57 crc kubenswrapper[4784]: I1205 12:30:57.961833 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1936fffc-bedb-4603-8d78-612a6b5ab791-proxy-ca-bundles\") pod \"controller-manager-7775848c7d-2knd7\" (UID: \"1936fffc-bedb-4603-8d78-612a6b5ab791\") " pod="openshift-controller-manager/controller-manager-7775848c7d-2knd7" Dec 05 12:30:57 crc kubenswrapper[4784]: I1205 12:30:57.961890 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1936fffc-bedb-4603-8d78-612a6b5ab791-client-ca\") pod \"controller-manager-7775848c7d-2knd7\" (UID: \"1936fffc-bedb-4603-8d78-612a6b5ab791\") " pod="openshift-controller-manager/controller-manager-7775848c7d-2knd7" Dec 05 12:30:57 crc kubenswrapper[4784]: I1205 12:30:57.961921 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vn4h6\" (UniqueName: \"kubernetes.io/projected/1936fffc-bedb-4603-8d78-612a6b5ab791-kube-api-access-vn4h6\") pod \"controller-manager-7775848c7d-2knd7\" (UID: \"1936fffc-bedb-4603-8d78-612a6b5ab791\") " pod="openshift-controller-manager/controller-manager-7775848c7d-2knd7" Dec 05 12:30:57 crc kubenswrapper[4784]: I1205 12:30:57.961959 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1936fffc-bedb-4603-8d78-612a6b5ab791-config\") pod \"controller-manager-7775848c7d-2knd7\" (UID: \"1936fffc-bedb-4603-8d78-612a6b5ab791\") " pod="openshift-controller-manager/controller-manager-7775848c7d-2knd7" Dec 05 12:30:57 crc kubenswrapper[4784]: I1205 
12:30:57.963276 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1936fffc-bedb-4603-8d78-612a6b5ab791-client-ca\") pod \"controller-manager-7775848c7d-2knd7\" (UID: \"1936fffc-bedb-4603-8d78-612a6b5ab791\") " pod="openshift-controller-manager/controller-manager-7775848c7d-2knd7" Dec 05 12:30:57 crc kubenswrapper[4784]: I1205 12:30:57.963815 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1936fffc-bedb-4603-8d78-612a6b5ab791-proxy-ca-bundles\") pod \"controller-manager-7775848c7d-2knd7\" (UID: \"1936fffc-bedb-4603-8d78-612a6b5ab791\") " pod="openshift-controller-manager/controller-manager-7775848c7d-2knd7" Dec 05 12:30:57 crc kubenswrapper[4784]: I1205 12:30:57.964472 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1936fffc-bedb-4603-8d78-612a6b5ab791-config\") pod \"controller-manager-7775848c7d-2knd7\" (UID: \"1936fffc-bedb-4603-8d78-612a6b5ab791\") " pod="openshift-controller-manager/controller-manager-7775848c7d-2knd7" Dec 05 12:30:57 crc kubenswrapper[4784]: I1205 12:30:57.982472 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1936fffc-bedb-4603-8d78-612a6b5ab791-serving-cert\") pod \"controller-manager-7775848c7d-2knd7\" (UID: \"1936fffc-bedb-4603-8d78-612a6b5ab791\") " pod="openshift-controller-manager/controller-manager-7775848c7d-2knd7" Dec 05 12:30:57 crc kubenswrapper[4784]: I1205 12:30:57.986325 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vn4h6\" (UniqueName: \"kubernetes.io/projected/1936fffc-bedb-4603-8d78-612a6b5ab791-kube-api-access-vn4h6\") pod \"controller-manager-7775848c7d-2knd7\" (UID: \"1936fffc-bedb-4603-8d78-612a6b5ab791\") " pod="openshift-controller-manager/controller-manager-7775848c7d-2knd7" Dec 05 12:30:58 crc kubenswrapper[4784]: I1205 12:30:58.075746 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-65c8945786-kv2q4" event={"ID":"af67d3eb-5894-4c69-85dc-a01e544a814c","Type":"ContainerStarted","Data":"0b0257646934cf161d1e6269ebc1d731cf60da1c6ac2c09ba3563230c45bf0a3"} Dec 05 12:30:58 crc kubenswrapper[4784]: I1205 12:30:58.075977 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-65c8945786-kv2q4" Dec 05 12:30:58 crc kubenswrapper[4784]: I1205 12:30:58.095076 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7775848c7d-2knd7" Dec 05 12:30:58 crc kubenswrapper[4784]: I1205 12:30:58.101722 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-65c8945786-kv2q4" podStartSLOduration=12.101700786 podStartE2EDuration="12.101700786s" podCreationTimestamp="2025-12-05 12:30:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:30:58.097656369 +0000 UTC m=+337.517723184" watchObservedRunningTime="2025-12-05 12:30:58.101700786 +0000 UTC m=+337.521767601" Dec 05 12:30:58 crc kubenswrapper[4784]: I1205 12:30:58.206247 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-65c8945786-kv2q4" Dec 05 12:30:58 crc kubenswrapper[4784]: I1205 12:30:58.315155 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7775848c7d-2knd7"] Dec 05 12:30:58 crc kubenswrapper[4784]: W1205 12:30:58.323286 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1936fffc_bedb_4603_8d78_612a6b5ab791.slice/crio-78d03167ace541040476c47c624fd690fcaf087b502d6ded2c4fe9e747d1b5b6 WatchSource:0}: Error finding container 78d03167ace541040476c47c624fd690fcaf087b502d6ded2c4fe9e747d1b5b6: Status 404 returned error can't find the container with id 78d03167ace541040476c47c624fd690fcaf087b502d6ded2c4fe9e747d1b5b6 Dec 05 12:30:59 crc kubenswrapper[4784]: I1205 12:30:59.084839 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7775848c7d-2knd7" event={"ID":"1936fffc-bedb-4603-8d78-612a6b5ab791","Type":"ContainerStarted","Data":"c4eaea4f306c64ed861b690404ea2ee4ab6fd8d3cab9c05d6c35e946ab3e95de"} Dec 05 12:30:59 crc kubenswrapper[4784]: I1205 12:30:59.084927 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7775848c7d-2knd7" event={"ID":"1936fffc-bedb-4603-8d78-612a6b5ab791","Type":"ContainerStarted","Data":"78d03167ace541040476c47c624fd690fcaf087b502d6ded2c4fe9e747d1b5b6"} Dec 05 12:30:59 crc kubenswrapper[4784]: I1205 12:30:59.085337 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7775848c7d-2knd7" Dec 05 12:30:59 crc kubenswrapper[4784]: I1205 12:30:59.090066 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-7775848c7d-2knd7" Dec 05 12:30:59 crc kubenswrapper[4784]: I1205 12:30:59.102856 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7775848c7d-2knd7" podStartSLOduration=13.102838945 podStartE2EDuration="13.102838945s" podCreationTimestamp="2025-12-05 12:30:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:30:59.102484254 +0000 UTC m=+338.522551089" watchObservedRunningTime="2025-12-05 12:30:59.102838945 +0000 UTC m=+338.522905760" Dec 05 12:30:59 crc kubenswrapper[4784]: I1205 12:30:59.572911 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: 
Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:30:59 crc kubenswrapper[4784]: I1205 12:30:59.573323 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:31:29 crc kubenswrapper[4784]: I1205 12:31:29.573077 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:31:29 crc kubenswrapper[4784]: I1205 12:31:29.573693 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:31:59 crc kubenswrapper[4784]: I1205 12:31:59.573634 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:31:59 crc kubenswrapper[4784]: I1205 12:31:59.574436 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:31:59 crc kubenswrapper[4784]: I1205 12:31:59.574505 4784 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" Dec 05 12:31:59 crc kubenswrapper[4784]: I1205 12:31:59.575433 4784 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ea74219957722474619164ca157e865be958964ef6b321945fde9673ee5d6f29"} pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 12:31:59 crc kubenswrapper[4784]: I1205 12:31:59.575513 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" containerID="cri-o://ea74219957722474619164ca157e865be958964ef6b321945fde9673ee5d6f29" gracePeriod=600 Dec 05 12:32:00 crc kubenswrapper[4784]: I1205 12:32:00.429418 4784 generic.go:334] "Generic (PLEG): container finished" podID="be412f31-7a36-4811-8914-be8cdc987d08" containerID="ea74219957722474619164ca157e865be958964ef6b321945fde9673ee5d6f29" exitCode=0 Dec 05 12:32:00 crc kubenswrapper[4784]: I1205 12:32:00.430107 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" 
event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerDied","Data":"ea74219957722474619164ca157e865be958964ef6b321945fde9673ee5d6f29"} Dec 05 12:32:00 crc kubenswrapper[4784]: I1205 12:32:00.430228 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerStarted","Data":"e5e541d632ebc1ef6afb4ee0c8edcd4e699e191af28a85d7c631d6cd3544a8e3"} Dec 05 12:32:00 crc kubenswrapper[4784]: I1205 12:32:00.430302 4784 scope.go:117] "RemoveContainer" containerID="45f31312230c7d56492145f94ee5dfda841a79b7d35cf4139b4fc09eacf070fe" Dec 05 12:33:59 crc kubenswrapper[4784]: I1205 12:33:59.572490 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:33:59 crc kubenswrapper[4784]: I1205 12:33:59.574012 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:34:21 crc kubenswrapper[4784]: I1205 12:34:21.245552 4784 scope.go:117] "RemoveContainer" containerID="9d98bde74b060f758faa4205ce89b973e81327287496df2e2b8bc42eb4036271" Dec 05 12:34:29 crc kubenswrapper[4784]: I1205 12:34:29.573442 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:34:29 crc kubenswrapper[4784]: I1205 12:34:29.574077 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:34:59 crc kubenswrapper[4784]: I1205 12:34:59.573215 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:34:59 crc kubenswrapper[4784]: I1205 12:34:59.573774 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:34:59 crc kubenswrapper[4784]: I1205 12:34:59.573831 4784 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" Dec 05 12:34:59 crc kubenswrapper[4784]: I1205 12:34:59.574698 4784 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"e5e541d632ebc1ef6afb4ee0c8edcd4e699e191af28a85d7c631d6cd3544a8e3"} pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 12:34:59 crc kubenswrapper[4784]: I1205 12:34:59.574794 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" containerID="cri-o://e5e541d632ebc1ef6afb4ee0c8edcd4e699e191af28a85d7c631d6cd3544a8e3" gracePeriod=600 Dec 05 12:35:00 crc kubenswrapper[4784]: I1205 12:35:00.576731 4784 generic.go:334] "Generic (PLEG): container finished" podID="be412f31-7a36-4811-8914-be8cdc987d08" containerID="e5e541d632ebc1ef6afb4ee0c8edcd4e699e191af28a85d7c631d6cd3544a8e3" exitCode=0 Dec 05 12:35:00 crc kubenswrapper[4784]: I1205 12:35:00.576818 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerDied","Data":"e5e541d632ebc1ef6afb4ee0c8edcd4e699e191af28a85d7c631d6cd3544a8e3"} Dec 05 12:35:00 crc kubenswrapper[4784]: I1205 12:35:00.578777 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerStarted","Data":"137e007b26aac5135103d758a13b82ca82f6b2724608f280182653dcce8c9022"} Dec 05 12:35:00 crc kubenswrapper[4784]: I1205 12:35:00.578810 4784 scope.go:117] "RemoveContainer" containerID="ea74219957722474619164ca157e865be958964ef6b321945fde9673ee5d6f29" Dec 05 12:35:21 crc kubenswrapper[4784]: I1205 12:35:21.283912 4784 scope.go:117] "RemoveContainer" containerID="2ef1331e31888aa89cecb3fb44bc46be65bf27fdc801d727e6596e3f734de92d" Dec 05 12:35:21 crc kubenswrapper[4784]: I1205 12:35:21.303060 4784 scope.go:117] "RemoveContainer" containerID="e060f096a1f978333ef685540163907070aba3829d4ee9538807dc9d08399a7b" Dec 05 12:35:52 crc kubenswrapper[4784]: I1205 12:35:52.382804 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-6fms9"] Dec 05 12:35:52 crc kubenswrapper[4784]: I1205 12:35:52.384160 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-6fms9" Dec 05 12:35:52 crc kubenswrapper[4784]: I1205 12:35:52.387305 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Dec 05 12:35:52 crc kubenswrapper[4784]: I1205 12:35:52.387400 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Dec 05 12:35:52 crc kubenswrapper[4784]: I1205 12:35:52.388394 4784 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-l9d25" Dec 05 12:35:52 crc kubenswrapper[4784]: I1205 12:35:52.396407 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-5vsrc"] Dec 05 12:35:52 crc kubenswrapper[4784]: I1205 12:35:52.397529 4784 util.go:30] "No sandbox for pod can be found. 
Dec 05 12:35:52 crc kubenswrapper[4784]: I1205 12:35:52.402656 4784 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-zzq9r"
Dec 05 12:35:52 crc kubenswrapper[4784]: I1205 12:35:52.406997 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-5vsrc"]
Dec 05 12:35:52 crc kubenswrapper[4784]: I1205 12:35:52.424810 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-8brl6"]
Dec 05 12:35:52 crc kubenswrapper[4784]: I1205 12:35:52.425867 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-8brl6"
Dec 05 12:35:52 crc kubenswrapper[4784]: I1205 12:35:52.431534 4784 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-wnc5k"
Dec 05 12:35:52 crc kubenswrapper[4784]: I1205 12:35:52.445688 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-8brl6"]
Dec 05 12:35:52 crc kubenswrapper[4784]: I1205 12:35:52.462340 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-6fms9"]
Dec 05 12:35:52 crc kubenswrapper[4784]: I1205 12:35:52.466355 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fjdf2\" (UniqueName: \"kubernetes.io/projected/7470ff9d-0206-41d0-b96c-b6618595be7a-kube-api-access-fjdf2\") pod \"cert-manager-5b446d88c5-5vsrc\" (UID: \"7470ff9d-0206-41d0-b96c-b6618595be7a\") " pod="cert-manager/cert-manager-5b446d88c5-5vsrc"
Dec 05 12:35:52 crc kubenswrapper[4784]: I1205 12:35:52.466442 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zr78m\" (UniqueName: \"kubernetes.io/projected/4ae09fde-6000-4f2c-b9bf-ed200fcd83e5-kube-api-access-zr78m\") pod \"cert-manager-webhook-5655c58dd6-8brl6\" (UID: \"4ae09fde-6000-4f2c-b9bf-ed200fcd83e5\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-8brl6"
Dec 05 12:35:52 crc kubenswrapper[4784]: I1205 12:35:52.466582 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4gkp2\" (UniqueName: \"kubernetes.io/projected/90561a13-c4ba-4973-9e21-c96cbea6a0b2-kube-api-access-4gkp2\") pod \"cert-manager-cainjector-7f985d654d-6fms9\" (UID: \"90561a13-c4ba-4973-9e21-c96cbea6a0b2\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-6fms9"
Dec 05 12:35:52 crc kubenswrapper[4784]: I1205 12:35:52.567656 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fjdf2\" (UniqueName: \"kubernetes.io/projected/7470ff9d-0206-41d0-b96c-b6618595be7a-kube-api-access-fjdf2\") pod \"cert-manager-5b446d88c5-5vsrc\" (UID: \"7470ff9d-0206-41d0-b96c-b6618595be7a\") " pod="cert-manager/cert-manager-5b446d88c5-5vsrc"
Dec 05 12:35:52 crc kubenswrapper[4784]: I1205 12:35:52.567710 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zr78m\" (UniqueName: \"kubernetes.io/projected/4ae09fde-6000-4f2c-b9bf-ed200fcd83e5-kube-api-access-zr78m\") pod \"cert-manager-webhook-5655c58dd6-8brl6\" (UID: \"4ae09fde-6000-4f2c-b9bf-ed200fcd83e5\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-8brl6"
Dec 05 12:35:52 crc kubenswrapper[4784]: I1205 12:35:52.567762 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4gkp2\" (UniqueName: \"kubernetes.io/projected/90561a13-c4ba-4973-9e21-c96cbea6a0b2-kube-api-access-4gkp2\") pod \"cert-manager-cainjector-7f985d654d-6fms9\" (UID: \"90561a13-c4ba-4973-9e21-c96cbea6a0b2\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-6fms9"
Dec 05 12:35:52 crc kubenswrapper[4784]: I1205 12:35:52.590252 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zr78m\" (UniqueName: \"kubernetes.io/projected/4ae09fde-6000-4f2c-b9bf-ed200fcd83e5-kube-api-access-zr78m\") pod \"cert-manager-webhook-5655c58dd6-8brl6\" (UID: \"4ae09fde-6000-4f2c-b9bf-ed200fcd83e5\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-8brl6"
Dec 05 12:35:52 crc kubenswrapper[4784]: I1205 12:35:52.590405 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fjdf2\" (UniqueName: \"kubernetes.io/projected/7470ff9d-0206-41d0-b96c-b6618595be7a-kube-api-access-fjdf2\") pod \"cert-manager-5b446d88c5-5vsrc\" (UID: \"7470ff9d-0206-41d0-b96c-b6618595be7a\") " pod="cert-manager/cert-manager-5b446d88c5-5vsrc"
Dec 05 12:35:52 crc kubenswrapper[4784]: I1205 12:35:52.595817 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4gkp2\" (UniqueName: \"kubernetes.io/projected/90561a13-c4ba-4973-9e21-c96cbea6a0b2-kube-api-access-4gkp2\") pod \"cert-manager-cainjector-7f985d654d-6fms9\" (UID: \"90561a13-c4ba-4973-9e21-c96cbea6a0b2\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-6fms9"
Dec 05 12:35:52 crc kubenswrapper[4784]: I1205 12:35:52.699257 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-6fms9"
Dec 05 12:35:52 crc kubenswrapper[4784]: I1205 12:35:52.713344 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-5vsrc"
Dec 05 12:35:52 crc kubenswrapper[4784]: I1205 12:35:52.740145 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-8brl6"
Dec 05 12:35:52 crc kubenswrapper[4784]: I1205 12:35:52.930101 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-6fms9"]
Dec 05 12:35:52 crc kubenswrapper[4784]: I1205 12:35:52.940903 4784 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 05 12:35:52 crc kubenswrapper[4784]: I1205 12:35:52.969598 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-5vsrc"]
Dec 05 12:35:52 crc kubenswrapper[4784]: W1205 12:35:52.971802 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7470ff9d_0206_41d0_b96c_b6618595be7a.slice/crio-566e84106f3a706824253e64922b9472d61a011896e218cb6be34a9fdb787177 WatchSource:0}: Error finding container 566e84106f3a706824253e64922b9472d61a011896e218cb6be34a9fdb787177: Status 404 returned error can't find the container with id 566e84106f3a706824253e64922b9472d61a011896e218cb6be34a9fdb787177
Dec 05 12:35:53 crc kubenswrapper[4784]: I1205 12:35:53.023166 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-8brl6"]
Dec 05 12:35:53 crc kubenswrapper[4784]: I1205 12:35:53.038958 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-6fms9" event={"ID":"90561a13-c4ba-4973-9e21-c96cbea6a0b2","Type":"ContainerStarted","Data":"16406245c58b495cbb367c872e94d7d96901b40047ef77bc1a4585a8c6a62617"}
Dec 05 12:35:53 crc kubenswrapper[4784]: I1205 12:35:53.040835 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-5vsrc" event={"ID":"7470ff9d-0206-41d0-b96c-b6618595be7a","Type":"ContainerStarted","Data":"566e84106f3a706824253e64922b9472d61a011896e218cb6be34a9fdb787177"}
Dec 05 12:35:54 crc kubenswrapper[4784]: I1205 12:35:54.052223 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-8brl6" event={"ID":"4ae09fde-6000-4f2c-b9bf-ed200fcd83e5","Type":"ContainerStarted","Data":"8195bfb744bb385eb2afa9b5048b2e4b86710681889a37c8729898f626c5453d"}
Dec 05 12:35:57 crc kubenswrapper[4784]: I1205 12:35:57.081646 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-6fms9" event={"ID":"90561a13-c4ba-4973-9e21-c96cbea6a0b2","Type":"ContainerStarted","Data":"f0a1208413782825f5ac380669f69a1acf5df35a0ce3375a56a8388cf94cd902"}
Dec 05 12:35:57 crc kubenswrapper[4784]: I1205 12:35:57.083019 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-8brl6" event={"ID":"4ae09fde-6000-4f2c-b9bf-ed200fcd83e5","Type":"ContainerStarted","Data":"f252d9455b4b3e7ca3001c9a46738d37c0dda16e2881e541162830e819546959"}
Dec 05 12:35:57 crc kubenswrapper[4784]: I1205 12:35:57.083105 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-8brl6"
Dec 05 12:35:57 crc kubenswrapper[4784]: I1205 12:35:57.085014 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-5vsrc" event={"ID":"7470ff9d-0206-41d0-b96c-b6618595be7a","Type":"ContainerStarted","Data":"23dcbce85e820b23e052961adaf7f25ad612b1070e4569161e2d34cbda5a59ba"}
Dec 05 12:35:57 crc kubenswrapper[4784]: I1205 12:35:57.099055 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-6fms9" podStartSLOduration=1.4541714159999999 podStartE2EDuration="5.099034231s" podCreationTimestamp="2025-12-05 12:35:52 +0000 UTC" firstStartedPulling="2025-12-05 12:35:52.940651219 +0000 UTC m=+632.360718034" lastFinishedPulling="2025-12-05 12:35:56.585514034 +0000 UTC m=+636.005580849" observedRunningTime="2025-12-05 12:35:57.094676522 +0000 UTC m=+636.514743347" watchObservedRunningTime="2025-12-05 12:35:57.099034231 +0000 UTC m=+636.519101056"
"Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-6fms9" podStartSLOduration=1.4541714159999999 podStartE2EDuration="5.099034231s" podCreationTimestamp="2025-12-05 12:35:52 +0000 UTC" firstStartedPulling="2025-12-05 12:35:52.940651219 +0000 UTC m=+632.360718034" lastFinishedPulling="2025-12-05 12:35:56.585514034 +0000 UTC m=+636.005580849" observedRunningTime="2025-12-05 12:35:57.094676522 +0000 UTC m=+636.514743347" watchObservedRunningTime="2025-12-05 12:35:57.099034231 +0000 UTC m=+636.519101056" Dec 05 12:35:57 crc kubenswrapper[4784]: I1205 12:35:57.122242 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-8brl6" podStartSLOduration=1.5700181610000001 podStartE2EDuration="5.122222524s" podCreationTimestamp="2025-12-05 12:35:52 +0000 UTC" firstStartedPulling="2025-12-05 12:35:53.037826183 +0000 UTC m=+632.457892998" lastFinishedPulling="2025-12-05 12:35:56.590030546 +0000 UTC m=+636.010097361" observedRunningTime="2025-12-05 12:35:57.11736513 +0000 UTC m=+636.537431985" watchObservedRunningTime="2025-12-05 12:35:57.122222524 +0000 UTC m=+636.542289339" Dec 05 12:35:57 crc kubenswrapper[4784]: I1205 12:35:57.139800 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-5vsrc" podStartSLOduration=1.535834419 podStartE2EDuration="5.139784149s" podCreationTimestamp="2025-12-05 12:35:52 +0000 UTC" firstStartedPulling="2025-12-05 12:35:52.974400527 +0000 UTC m=+632.394467342" lastFinishedPulling="2025-12-05 12:35:56.578350267 +0000 UTC m=+635.998417072" observedRunningTime="2025-12-05 12:35:57.137527598 +0000 UTC m=+636.557594423" watchObservedRunningTime="2025-12-05 12:35:57.139784149 +0000 UTC m=+636.559850964" Dec 05 12:36:02 crc kubenswrapper[4784]: I1205 12:36:02.742994 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-8brl6" Dec 05 12:36:02 crc kubenswrapper[4784]: I1205 12:36:02.979652 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-fxbpl"] Dec 05 12:36:02 crc kubenswrapper[4784]: I1205 12:36:02.980140 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerName="ovn-controller" containerID="cri-o://c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966" gracePeriod=30 Dec 05 12:36:02 crc kubenswrapper[4784]: I1205 12:36:02.980291 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd" gracePeriod=30 Dec 05 12:36:02 crc kubenswrapper[4784]: I1205 12:36:02.980278 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerName="northd" containerID="cri-o://fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d" gracePeriod=30 Dec 05 12:36:02 crc kubenswrapper[4784]: I1205 12:36:02.980331 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerName="sbdb" 
containerID="cri-o://4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619" gracePeriod=30 Dec 05 12:36:02 crc kubenswrapper[4784]: I1205 12:36:02.980378 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerName="ovn-acl-logging" containerID="cri-o://b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262" gracePeriod=30 Dec 05 12:36:02 crc kubenswrapper[4784]: I1205 12:36:02.980369 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerName="nbdb" containerID="cri-o://77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd" gracePeriod=30 Dec 05 12:36:02 crc kubenswrapper[4784]: I1205 12:36:02.980373 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerName="kube-rbac-proxy-node" containerID="cri-o://3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a" gracePeriod=30 Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.036121 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerName="ovnkube-controller" containerID="cri-o://e727e38f6d5f43540b18e918bc7541698e176628300db91c6c66b395bc5e5ad4" gracePeriod=30 Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.131381 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fxbpl_291f2a35-7dd5-4af9-87f0-caae4ef75c66/ovnkube-controller/3.log" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.133807 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fxbpl_291f2a35-7dd5-4af9-87f0-caae4ef75c66/ovn-acl-logging/0.log" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.134441 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fxbpl_291f2a35-7dd5-4af9-87f0-caae4ef75c66/ovn-controller/0.log" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.134919 4784 generic.go:334] "Generic (PLEG): container finished" podID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerID="6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd" exitCode=0 Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.134951 4784 generic.go:334] "Generic (PLEG): container finished" podID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerID="3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a" exitCode=0 Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.134966 4784 generic.go:334] "Generic (PLEG): container finished" podID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerID="b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262" exitCode=143 Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.134977 4784 generic.go:334] "Generic (PLEG): container finished" podID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerID="c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966" exitCode=143 Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.134995 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" 
event={"ID":"291f2a35-7dd5-4af9-87f0-caae4ef75c66","Type":"ContainerDied","Data":"6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd"} Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.135052 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" event={"ID":"291f2a35-7dd5-4af9-87f0-caae4ef75c66","Type":"ContainerDied","Data":"3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a"} Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.135067 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" event={"ID":"291f2a35-7dd5-4af9-87f0-caae4ef75c66","Type":"ContainerDied","Data":"b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262"} Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.135080 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" event={"ID":"291f2a35-7dd5-4af9-87f0-caae4ef75c66","Type":"ContainerDied","Data":"c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966"} Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.136927 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-g5gv5_759cb09f-42c3-4254-82f8-b5285b61012a/kube-multus/2.log" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.137445 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-g5gv5_759cb09f-42c3-4254-82f8-b5285b61012a/kube-multus/1.log" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.137482 4784 generic.go:334] "Generic (PLEG): container finished" podID="759cb09f-42c3-4254-82f8-b5285b61012a" containerID="edb0209f26abd1d599a14ecc0e1ca91e488e5f887ebf0f77ab6a5df65eb5dde4" exitCode=2 Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.137505 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-g5gv5" event={"ID":"759cb09f-42c3-4254-82f8-b5285b61012a","Type":"ContainerDied","Data":"edb0209f26abd1d599a14ecc0e1ca91e488e5f887ebf0f77ab6a5df65eb5dde4"} Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.137526 4784 scope.go:117] "RemoveContainer" containerID="58740f457dc5aa8e984e28676df457a65c76e6be7ac9f64348a9d7a25246f8bd" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.138001 4784 scope.go:117] "RemoveContainer" containerID="edb0209f26abd1d599a14ecc0e1ca91e488e5f887ebf0f77ab6a5df65eb5dde4" Dec 05 12:36:03 crc kubenswrapper[4784]: E1205 12:36:03.138179 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-g5gv5_openshift-multus(759cb09f-42c3-4254-82f8-b5285b61012a)\"" pod="openshift-multus/multus-g5gv5" podUID="759cb09f-42c3-4254-82f8-b5285b61012a" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.267601 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fxbpl_291f2a35-7dd5-4af9-87f0-caae4ef75c66/ovnkube-controller/3.log" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.269671 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fxbpl_291f2a35-7dd5-4af9-87f0-caae4ef75c66/ovn-acl-logging/0.log" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.270263 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fxbpl_291f2a35-7dd5-4af9-87f0-caae4ef75c66/ovn-controller/0.log" Dec 05 
Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.323323 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/291f2a35-7dd5-4af9-87f0-caae4ef75c66-ovnkube-script-lib\") pod \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") "
Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.323381 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-var-lib-openvswitch\") pod \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") "
Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.323430 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-run-ovn\") pod \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") "
Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.323463 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-host-slash\") pod \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") "
Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.323501 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-log-socket\") pod \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") "
Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.323524 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-host-kubelet\") pod \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") "
Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.323548 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-systemd-units\") pod \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") "
Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.323580 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/291f2a35-7dd5-4af9-87f0-caae4ef75c66-ovnkube-config\") pod \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") "
Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.323613 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-host-run-ovn-kubernetes\") pod \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") "
Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.323661 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-etc-openvswitch\") pod \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") "
Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.323687 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-run-systemd\") pod \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") "
Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.323719 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gv9d9\" (UniqueName: \"kubernetes.io/projected/291f2a35-7dd5-4af9-87f0-caae4ef75c66-kube-api-access-gv9d9\") pod \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") "
Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.323751 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-host-var-lib-cni-networks-ovn-kubernetes\") pod \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") "
Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.323779 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-node-log\") pod \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") "
Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.323802 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-host-run-netns\") pod \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") "
Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.323826 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-run-openvswitch\") pod \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") "
Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.323858 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/291f2a35-7dd5-4af9-87f0-caae4ef75c66-ovn-node-metrics-cert\") pod \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") "
Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.323879 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-host-cni-bin\") pod \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") "
Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.323899 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/291f2a35-7dd5-4af9-87f0-caae4ef75c66-env-overrides\") pod \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") "
Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.323922 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-host-cni-netd\") pod \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") "
for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-host-cni-netd\") pod \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\" (UID: \"291f2a35-7dd5-4af9-87f0-caae4ef75c66\") " Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.324844 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "291f2a35-7dd5-4af9-87f0-caae4ef75c66" (UID: "291f2a35-7dd5-4af9-87f0-caae4ef75c66"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.324920 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "291f2a35-7dd5-4af9-87f0-caae4ef75c66" (UID: "291f2a35-7dd5-4af9-87f0-caae4ef75c66"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.324905 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "291f2a35-7dd5-4af9-87f0-caae4ef75c66" (UID: "291f2a35-7dd5-4af9-87f0-caae4ef75c66"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.324954 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "291f2a35-7dd5-4af9-87f0-caae4ef75c66" (UID: "291f2a35-7dd5-4af9-87f0-caae4ef75c66"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.324980 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-host-slash" (OuterVolumeSpecName: "host-slash") pod "291f2a35-7dd5-4af9-87f0-caae4ef75c66" (UID: "291f2a35-7dd5-4af9-87f0-caae4ef75c66"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.324988 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "291f2a35-7dd5-4af9-87f0-caae4ef75c66" (UID: "291f2a35-7dd5-4af9-87f0-caae4ef75c66"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.325003 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-log-socket" (OuterVolumeSpecName: "log-socket") pod "291f2a35-7dd5-4af9-87f0-caae4ef75c66" (UID: "291f2a35-7dd5-4af9-87f0-caae4ef75c66"). InnerVolumeSpecName "log-socket". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.325414 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "291f2a35-7dd5-4af9-87f0-caae4ef75c66" (UID: "291f2a35-7dd5-4af9-87f0-caae4ef75c66"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.325450 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "291f2a35-7dd5-4af9-87f0-caae4ef75c66" (UID: "291f2a35-7dd5-4af9-87f0-caae4ef75c66"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.325473 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "291f2a35-7dd5-4af9-87f0-caae4ef75c66" (UID: "291f2a35-7dd5-4af9-87f0-caae4ef75c66"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.325474 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-node-log" (OuterVolumeSpecName: "node-log") pod "291f2a35-7dd5-4af9-87f0-caae4ef75c66" (UID: "291f2a35-7dd5-4af9-87f0-caae4ef75c66"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.325511 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "291f2a35-7dd5-4af9-87f0-caae4ef75c66" (UID: "291f2a35-7dd5-4af9-87f0-caae4ef75c66"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.325495 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "291f2a35-7dd5-4af9-87f0-caae4ef75c66" (UID: "291f2a35-7dd5-4af9-87f0-caae4ef75c66"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.325557 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "291f2a35-7dd5-4af9-87f0-caae4ef75c66" (UID: "291f2a35-7dd5-4af9-87f0-caae4ef75c66"). InnerVolumeSpecName "etc-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.325815 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/291f2a35-7dd5-4af9-87f0-caae4ef75c66-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "291f2a35-7dd5-4af9-87f0-caae4ef75c66" (UID: "291f2a35-7dd5-4af9-87f0-caae4ef75c66"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.326375 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/291f2a35-7dd5-4af9-87f0-caae4ef75c66-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "291f2a35-7dd5-4af9-87f0-caae4ef75c66" (UID: "291f2a35-7dd5-4af9-87f0-caae4ef75c66"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.330741 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/291f2a35-7dd5-4af9-87f0-caae4ef75c66-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "291f2a35-7dd5-4af9-87f0-caae4ef75c66" (UID: "291f2a35-7dd5-4af9-87f0-caae4ef75c66"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.331306 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/291f2a35-7dd5-4af9-87f0-caae4ef75c66-kube-api-access-gv9d9" (OuterVolumeSpecName: "kube-api-access-gv9d9") pod "291f2a35-7dd5-4af9-87f0-caae4ef75c66" (UID: "291f2a35-7dd5-4af9-87f0-caae4ef75c66"). InnerVolumeSpecName "kube-api-access-gv9d9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.331510 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/291f2a35-7dd5-4af9-87f0-caae4ef75c66-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "291f2a35-7dd5-4af9-87f0-caae4ef75c66" (UID: "291f2a35-7dd5-4af9-87f0-caae4ef75c66"). InnerVolumeSpecName "ovn-node-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.334524 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-hm2td"] Dec 05 12:36:03 crc kubenswrapper[4784]: E1205 12:36:03.334899 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerName="sbdb" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.334930 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerName="sbdb" Dec 05 12:36:03 crc kubenswrapper[4784]: E1205 12:36:03.334954 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerName="nbdb" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.334965 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerName="nbdb" Dec 05 12:36:03 crc kubenswrapper[4784]: E1205 12:36:03.334983 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerName="ovn-controller" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.334994 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerName="ovn-controller" Dec 05 12:36:03 crc kubenswrapper[4784]: E1205 12:36:03.335014 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerName="kubecfg-setup" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.335024 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerName="kubecfg-setup" Dec 05 12:36:03 crc kubenswrapper[4784]: E1205 12:36:03.335044 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerName="kube-rbac-proxy-ovn-metrics" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.335057 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerName="kube-rbac-proxy-ovn-metrics" Dec 05 12:36:03 crc kubenswrapper[4784]: E1205 12:36:03.335074 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerName="northd" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.335085 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerName="northd" Dec 05 12:36:03 crc kubenswrapper[4784]: E1205 12:36:03.335101 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerName="ovnkube-controller" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.335112 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerName="ovnkube-controller" Dec 05 12:36:03 crc kubenswrapper[4784]: E1205 12:36:03.335125 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerName="ovn-acl-logging" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.335136 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerName="ovn-acl-logging" Dec 05 12:36:03 crc kubenswrapper[4784]: E1205 12:36:03.335153 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" 
containerName="ovnkube-controller" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.335165 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerName="ovnkube-controller" Dec 05 12:36:03 crc kubenswrapper[4784]: E1205 12:36:03.335177 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerName="ovnkube-controller" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.335208 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerName="ovnkube-controller" Dec 05 12:36:03 crc kubenswrapper[4784]: E1205 12:36:03.335223 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerName="ovnkube-controller" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.335234 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerName="ovnkube-controller" Dec 05 12:36:03 crc kubenswrapper[4784]: E1205 12:36:03.335250 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerName="kube-rbac-proxy-node" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.335261 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerName="kube-rbac-proxy-node" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.335412 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerName="northd" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.335427 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerName="sbdb" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.335444 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerName="ovn-controller" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.335461 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerName="ovnkube-controller" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.335471 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerName="ovnkube-controller" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.335483 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerName="ovn-acl-logging" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.335500 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerName="kube-rbac-proxy-node" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.335516 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerName="kube-rbac-proxy-ovn-metrics" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.335534 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerName="nbdb" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.335546 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerName="ovnkube-controller" Dec 05 12:36:03 crc kubenswrapper[4784]: E1205 12:36:03.335721 4784 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerName="ovnkube-controller" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.335734 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerName="ovnkube-controller" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.335885 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerName="ovnkube-controller" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.336231 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerName="ovnkube-controller" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.338801 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.341424 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "291f2a35-7dd5-4af9-87f0-caae4ef75c66" (UID: "291f2a35-7dd5-4af9-87f0-caae4ef75c66"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.425047 4784 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.425093 4784 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/291f2a35-7dd5-4af9-87f0-caae4ef75c66-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.425109 4784 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.425122 4784 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/291f2a35-7dd5-4af9-87f0-caae4ef75c66-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.425134 4784 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.425144 4784 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/291f2a35-7dd5-4af9-87f0-caae4ef75c66-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.425156 4784 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.425167 4784 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 
12:36:03.425178 4784 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-host-slash\") on node \"crc\" DevicePath \"\"" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.425205 4784 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-log-socket\") on node \"crc\" DevicePath \"\"" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.425216 4784 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.425226 4784 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-systemd-units\") on node \"crc\" DevicePath \"\"" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.425236 4784 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/291f2a35-7dd5-4af9-87f0-caae4ef75c66-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.425247 4784 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.425257 4784 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.425268 4784 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.425278 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gv9d9\" (UniqueName: \"kubernetes.io/projected/291f2a35-7dd5-4af9-87f0-caae4ef75c66-kube-api-access-gv9d9\") on node \"crc\" DevicePath \"\"" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.425291 4784 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.425303 4784 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-node-log\") on node \"crc\" DevicePath \"\"" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.425315 4784 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/291f2a35-7dd5-4af9-87f0-caae4ef75c66-host-run-netns\") on node \"crc\" DevicePath \"\"" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.526482 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/72b3563e-7682-432a-b3dc-5dedc6e10110-host-cni-bin\") pod \"ovnkube-node-hm2td\" (UID: 
\"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.526535 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/72b3563e-7682-432a-b3dc-5dedc6e10110-run-systemd\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.526550 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/72b3563e-7682-432a-b3dc-5dedc6e10110-host-cni-netd\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.526596 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/72b3563e-7682-432a-b3dc-5dedc6e10110-log-socket\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.526677 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/72b3563e-7682-432a-b3dc-5dedc6e10110-node-log\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.526696 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/72b3563e-7682-432a-b3dc-5dedc6e10110-systemd-units\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.526712 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/72b3563e-7682-432a-b3dc-5dedc6e10110-run-openvswitch\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.526749 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/72b3563e-7682-432a-b3dc-5dedc6e10110-ovn-node-metrics-cert\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.526767 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/72b3563e-7682-432a-b3dc-5dedc6e10110-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.526792 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k4bsd\" (UniqueName: 
\"kubernetes.io/projected/72b3563e-7682-432a-b3dc-5dedc6e10110-kube-api-access-k4bsd\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.526838 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/72b3563e-7682-432a-b3dc-5dedc6e10110-host-slash\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.526860 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/72b3563e-7682-432a-b3dc-5dedc6e10110-env-overrides\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.526875 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/72b3563e-7682-432a-b3dc-5dedc6e10110-etc-openvswitch\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.526895 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/72b3563e-7682-432a-b3dc-5dedc6e10110-host-run-netns\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.526911 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/72b3563e-7682-432a-b3dc-5dedc6e10110-ovnkube-config\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.526927 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/72b3563e-7682-432a-b3dc-5dedc6e10110-var-lib-openvswitch\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.526943 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/72b3563e-7682-432a-b3dc-5dedc6e10110-run-ovn\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.526962 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/72b3563e-7682-432a-b3dc-5dedc6e10110-ovnkube-script-lib\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.527023 4784 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/72b3563e-7682-432a-b3dc-5dedc6e10110-host-run-ovn-kubernetes\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.527059 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/72b3563e-7682-432a-b3dc-5dedc6e10110-host-kubelet\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.627715 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/72b3563e-7682-432a-b3dc-5dedc6e10110-run-openvswitch\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.627769 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/72b3563e-7682-432a-b3dc-5dedc6e10110-ovn-node-metrics-cert\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.627793 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/72b3563e-7682-432a-b3dc-5dedc6e10110-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.627826 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k4bsd\" (UniqueName: \"kubernetes.io/projected/72b3563e-7682-432a-b3dc-5dedc6e10110-kube-api-access-k4bsd\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.627849 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/72b3563e-7682-432a-b3dc-5dedc6e10110-host-slash\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.627880 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/72b3563e-7682-432a-b3dc-5dedc6e10110-etc-openvswitch\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.627901 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/72b3563e-7682-432a-b3dc-5dedc6e10110-env-overrides\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.627923 4784 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/72b3563e-7682-432a-b3dc-5dedc6e10110-host-run-netns\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.627931 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/72b3563e-7682-432a-b3dc-5dedc6e10110-host-slash\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.627941 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/72b3563e-7682-432a-b3dc-5dedc6e10110-ovnkube-config\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.628033 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/72b3563e-7682-432a-b3dc-5dedc6e10110-var-lib-openvswitch\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.627874 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/72b3563e-7682-432a-b3dc-5dedc6e10110-run-openvswitch\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.628112 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/72b3563e-7682-432a-b3dc-5dedc6e10110-run-ovn\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.628073 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/72b3563e-7682-432a-b3dc-5dedc6e10110-run-ovn\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.628163 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/72b3563e-7682-432a-b3dc-5dedc6e10110-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.628227 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/72b3563e-7682-432a-b3dc-5dedc6e10110-var-lib-openvswitch\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.628264 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" 
(UniqueName: \"kubernetes.io/configmap/72b3563e-7682-432a-b3dc-5dedc6e10110-ovnkube-script-lib\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.628269 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/72b3563e-7682-432a-b3dc-5dedc6e10110-host-run-netns\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.628255 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/72b3563e-7682-432a-b3dc-5dedc6e10110-etc-openvswitch\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.628303 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/72b3563e-7682-432a-b3dc-5dedc6e10110-host-run-ovn-kubernetes\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.628348 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/72b3563e-7682-432a-b3dc-5dedc6e10110-host-run-ovn-kubernetes\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.628373 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/72b3563e-7682-432a-b3dc-5dedc6e10110-host-kubelet\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.628406 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/72b3563e-7682-432a-b3dc-5dedc6e10110-host-cni-bin\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.628418 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/72b3563e-7682-432a-b3dc-5dedc6e10110-host-kubelet\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.628454 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/72b3563e-7682-432a-b3dc-5dedc6e10110-run-systemd\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.628479 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/72b3563e-7682-432a-b3dc-5dedc6e10110-host-cni-bin\") pod 
\"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.628481 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/72b3563e-7682-432a-b3dc-5dedc6e10110-host-cni-netd\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.628516 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/72b3563e-7682-432a-b3dc-5dedc6e10110-run-systemd\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.628521 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/72b3563e-7682-432a-b3dc-5dedc6e10110-host-cni-netd\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.628579 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/72b3563e-7682-432a-b3dc-5dedc6e10110-log-socket\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.628631 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/72b3563e-7682-432a-b3dc-5dedc6e10110-node-log\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.628663 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/72b3563e-7682-432a-b3dc-5dedc6e10110-systemd-units\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.628729 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/72b3563e-7682-432a-b3dc-5dedc6e10110-node-log\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.628735 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/72b3563e-7682-432a-b3dc-5dedc6e10110-systemd-units\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.628753 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/72b3563e-7682-432a-b3dc-5dedc6e10110-log-socket\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.628935 4784 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/72b3563e-7682-432a-b3dc-5dedc6e10110-ovnkube-config\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.629028 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/72b3563e-7682-432a-b3dc-5dedc6e10110-env-overrides\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.629259 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/72b3563e-7682-432a-b3dc-5dedc6e10110-ovnkube-script-lib\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.631883 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/72b3563e-7682-432a-b3dc-5dedc6e10110-ovn-node-metrics-cert\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.654659 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k4bsd\" (UniqueName: \"kubernetes.io/projected/72b3563e-7682-432a-b3dc-5dedc6e10110-kube-api-access-k4bsd\") pod \"ovnkube-node-hm2td\" (UID: \"72b3563e-7682-432a-b3dc-5dedc6e10110\") " pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:03 crc kubenswrapper[4784]: I1205 12:36:03.660414 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.144934 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-g5gv5_759cb09f-42c3-4254-82f8-b5285b61012a/kube-multus/2.log" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.147036 4784 generic.go:334] "Generic (PLEG): container finished" podID="72b3563e-7682-432a-b3dc-5dedc6e10110" containerID="85d3d9664ed093e08c571f0d2680474c6ebb6d810df5f4317368313cf091ac4c" exitCode=0 Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.147110 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" event={"ID":"72b3563e-7682-432a-b3dc-5dedc6e10110","Type":"ContainerDied","Data":"85d3d9664ed093e08c571f0d2680474c6ebb6d810df5f4317368313cf091ac4c"} Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.147137 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" event={"ID":"72b3563e-7682-432a-b3dc-5dedc6e10110","Type":"ContainerStarted","Data":"ac75105cf28b232518c120fb084c38e1ca88cf12da15d302a6377f5012515c40"} Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.149766 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fxbpl_291f2a35-7dd5-4af9-87f0-caae4ef75c66/ovnkube-controller/3.log" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.154671 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fxbpl_291f2a35-7dd5-4af9-87f0-caae4ef75c66/ovn-acl-logging/0.log" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.155131 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fxbpl_291f2a35-7dd5-4af9-87f0-caae4ef75c66/ovn-controller/0.log" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.155502 4784 generic.go:334] "Generic (PLEG): container finished" podID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerID="e727e38f6d5f43540b18e918bc7541698e176628300db91c6c66b395bc5e5ad4" exitCode=0 Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.155523 4784 generic.go:334] "Generic (PLEG): container finished" podID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerID="4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619" exitCode=0 Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.155531 4784 generic.go:334] "Generic (PLEG): container finished" podID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerID="77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd" exitCode=0 Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.155538 4784 generic.go:334] "Generic (PLEG): container finished" podID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" containerID="fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d" exitCode=0 Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.155558 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" event={"ID":"291f2a35-7dd5-4af9-87f0-caae4ef75c66","Type":"ContainerDied","Data":"e727e38f6d5f43540b18e918bc7541698e176628300db91c6c66b395bc5e5ad4"} Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.155649 4784 scope.go:117] "RemoveContainer" containerID="e727e38f6d5f43540b18e918bc7541698e176628300db91c6c66b395bc5e5ad4" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.155588 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" event={"ID":"291f2a35-7dd5-4af9-87f0-caae4ef75c66","Type":"ContainerDied","Data":"4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619"} Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.157067 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" event={"ID":"291f2a35-7dd5-4af9-87f0-caae4ef75c66","Type":"ContainerDied","Data":"77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd"} Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.157105 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" event={"ID":"291f2a35-7dd5-4af9-87f0-caae4ef75c66","Type":"ContainerDied","Data":"fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d"} Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.157124 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" event={"ID":"291f2a35-7dd5-4af9-87f0-caae4ef75c66","Type":"ContainerDied","Data":"a5b2c66dee0e25112b9df888c2404f4f856ad3027d4cacf746ce6c499e7a9aab"} Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.164273 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-fxbpl" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.204415 4784 scope.go:117] "RemoveContainer" containerID="e1b0328193847e770f1dc35e11b6fa43f0f7a15326972cda4b409ca656d30566" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.224559 4784 scope.go:117] "RemoveContainer" containerID="4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.241980 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-fxbpl"] Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.247371 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-fxbpl"] Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.251160 4784 scope.go:117] "RemoveContainer" containerID="77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.270251 4784 scope.go:117] "RemoveContainer" containerID="fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.286539 4784 scope.go:117] "RemoveContainer" containerID="6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.301612 4784 scope.go:117] "RemoveContainer" containerID="3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.317086 4784 scope.go:117] "RemoveContainer" containerID="b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.341369 4784 scope.go:117] "RemoveContainer" containerID="c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.380929 4784 scope.go:117] "RemoveContainer" containerID="9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.412314 4784 scope.go:117] "RemoveContainer" containerID="e727e38f6d5f43540b18e918bc7541698e176628300db91c6c66b395bc5e5ad4" Dec 05 12:36:04 crc kubenswrapper[4784]: E1205 12:36:04.412865 4784 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e727e38f6d5f43540b18e918bc7541698e176628300db91c6c66b395bc5e5ad4\": container with ID starting with e727e38f6d5f43540b18e918bc7541698e176628300db91c6c66b395bc5e5ad4 not found: ID does not exist" containerID="e727e38f6d5f43540b18e918bc7541698e176628300db91c6c66b395bc5e5ad4" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.412897 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e727e38f6d5f43540b18e918bc7541698e176628300db91c6c66b395bc5e5ad4"} err="failed to get container status \"e727e38f6d5f43540b18e918bc7541698e176628300db91c6c66b395bc5e5ad4\": rpc error: code = NotFound desc = could not find container \"e727e38f6d5f43540b18e918bc7541698e176628300db91c6c66b395bc5e5ad4\": container with ID starting with e727e38f6d5f43540b18e918bc7541698e176628300db91c6c66b395bc5e5ad4 not found: ID does not exist" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.412918 4784 scope.go:117] "RemoveContainer" containerID="e1b0328193847e770f1dc35e11b6fa43f0f7a15326972cda4b409ca656d30566" Dec 05 12:36:04 crc kubenswrapper[4784]: E1205 12:36:04.413237 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e1b0328193847e770f1dc35e11b6fa43f0f7a15326972cda4b409ca656d30566\": container with ID starting with e1b0328193847e770f1dc35e11b6fa43f0f7a15326972cda4b409ca656d30566 not found: ID does not exist" containerID="e1b0328193847e770f1dc35e11b6fa43f0f7a15326972cda4b409ca656d30566" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.413340 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1b0328193847e770f1dc35e11b6fa43f0f7a15326972cda4b409ca656d30566"} err="failed to get container status \"e1b0328193847e770f1dc35e11b6fa43f0f7a15326972cda4b409ca656d30566\": rpc error: code = NotFound desc = could not find container \"e1b0328193847e770f1dc35e11b6fa43f0f7a15326972cda4b409ca656d30566\": container with ID starting with e1b0328193847e770f1dc35e11b6fa43f0f7a15326972cda4b409ca656d30566 not found: ID does not exist" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.413489 4784 scope.go:117] "RemoveContainer" containerID="4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619" Dec 05 12:36:04 crc kubenswrapper[4784]: E1205 12:36:04.414337 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619\": container with ID starting with 4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619 not found: ID does not exist" containerID="4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.414714 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619"} err="failed to get container status \"4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619\": rpc error: code = NotFound desc = could not find container \"4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619\": container with ID starting with 4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619 not found: ID does not exist" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.414729 4784 scope.go:117] "RemoveContainer" 
containerID="77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd" Dec 05 12:36:04 crc kubenswrapper[4784]: E1205 12:36:04.414962 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd\": container with ID starting with 77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd not found: ID does not exist" containerID="77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.414995 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd"} err="failed to get container status \"77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd\": rpc error: code = NotFound desc = could not find container \"77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd\": container with ID starting with 77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd not found: ID does not exist" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.415013 4784 scope.go:117] "RemoveContainer" containerID="fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d" Dec 05 12:36:04 crc kubenswrapper[4784]: E1205 12:36:04.415384 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d\": container with ID starting with fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d not found: ID does not exist" containerID="fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.415426 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d"} err="failed to get container status \"fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d\": rpc error: code = NotFound desc = could not find container \"fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d\": container with ID starting with fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d not found: ID does not exist" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.415452 4784 scope.go:117] "RemoveContainer" containerID="6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd" Dec 05 12:36:04 crc kubenswrapper[4784]: E1205 12:36:04.415851 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd\": container with ID starting with 6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd not found: ID does not exist" containerID="6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.415892 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd"} err="failed to get container status \"6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd\": rpc error: code = NotFound desc = could not find container \"6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd\": container with ID starting with 
6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd not found: ID does not exist" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.415925 4784 scope.go:117] "RemoveContainer" containerID="3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a" Dec 05 12:36:04 crc kubenswrapper[4784]: E1205 12:36:04.416266 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a\": container with ID starting with 3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a not found: ID does not exist" containerID="3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.416301 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a"} err="failed to get container status \"3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a\": rpc error: code = NotFound desc = could not find container \"3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a\": container with ID starting with 3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a not found: ID does not exist" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.416315 4784 scope.go:117] "RemoveContainer" containerID="b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262" Dec 05 12:36:04 crc kubenswrapper[4784]: E1205 12:36:04.416569 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262\": container with ID starting with b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262 not found: ID does not exist" containerID="b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.416589 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262"} err="failed to get container status \"b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262\": rpc error: code = NotFound desc = could not find container \"b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262\": container with ID starting with b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262 not found: ID does not exist" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.416602 4784 scope.go:117] "RemoveContainer" containerID="c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966" Dec 05 12:36:04 crc kubenswrapper[4784]: E1205 12:36:04.416952 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966\": container with ID starting with c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966 not found: ID does not exist" containerID="c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.416970 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966"} err="failed to get container status \"c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966\": rpc 
error: code = NotFound desc = could not find container \"c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966\": container with ID starting with c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966 not found: ID does not exist" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.416982 4784 scope.go:117] "RemoveContainer" containerID="9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a" Dec 05 12:36:04 crc kubenswrapper[4784]: E1205 12:36:04.417572 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\": container with ID starting with 9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a not found: ID does not exist" containerID="9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.417605 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a"} err="failed to get container status \"9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\": rpc error: code = NotFound desc = could not find container \"9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\": container with ID starting with 9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a not found: ID does not exist" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.417619 4784 scope.go:117] "RemoveContainer" containerID="e727e38f6d5f43540b18e918bc7541698e176628300db91c6c66b395bc5e5ad4" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.417928 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e727e38f6d5f43540b18e918bc7541698e176628300db91c6c66b395bc5e5ad4"} err="failed to get container status \"e727e38f6d5f43540b18e918bc7541698e176628300db91c6c66b395bc5e5ad4\": rpc error: code = NotFound desc = could not find container \"e727e38f6d5f43540b18e918bc7541698e176628300db91c6c66b395bc5e5ad4\": container with ID starting with e727e38f6d5f43540b18e918bc7541698e176628300db91c6c66b395bc5e5ad4 not found: ID does not exist" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.417950 4784 scope.go:117] "RemoveContainer" containerID="e1b0328193847e770f1dc35e11b6fa43f0f7a15326972cda4b409ca656d30566" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.418317 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1b0328193847e770f1dc35e11b6fa43f0f7a15326972cda4b409ca656d30566"} err="failed to get container status \"e1b0328193847e770f1dc35e11b6fa43f0f7a15326972cda4b409ca656d30566\": rpc error: code = NotFound desc = could not find container \"e1b0328193847e770f1dc35e11b6fa43f0f7a15326972cda4b409ca656d30566\": container with ID starting with e1b0328193847e770f1dc35e11b6fa43f0f7a15326972cda4b409ca656d30566 not found: ID does not exist" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.418342 4784 scope.go:117] "RemoveContainer" containerID="4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.418783 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619"} err="failed to get container status \"4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619\": rpc 
error: code = NotFound desc = could not find container \"4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619\": container with ID starting with 4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619 not found: ID does not exist" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.418817 4784 scope.go:117] "RemoveContainer" containerID="77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.419171 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd"} err="failed to get container status \"77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd\": rpc error: code = NotFound desc = could not find container \"77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd\": container with ID starting with 77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd not found: ID does not exist" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.419215 4784 scope.go:117] "RemoveContainer" containerID="fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.420286 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d"} err="failed to get container status \"fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d\": rpc error: code = NotFound desc = could not find container \"fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d\": container with ID starting with fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d not found: ID does not exist" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.420314 4784 scope.go:117] "RemoveContainer" containerID="6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.420627 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd"} err="failed to get container status \"6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd\": rpc error: code = NotFound desc = could not find container \"6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd\": container with ID starting with 6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd not found: ID does not exist" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.420652 4784 scope.go:117] "RemoveContainer" containerID="3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.420938 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a"} err="failed to get container status \"3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a\": rpc error: code = NotFound desc = could not find container \"3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a\": container with ID starting with 3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a not found: ID does not exist" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.420990 4784 scope.go:117] "RemoveContainer" containerID="b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262" Dec 05 12:36:04 crc 
kubenswrapper[4784]: I1205 12:36:04.421260 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262"} err="failed to get container status \"b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262\": rpc error: code = NotFound desc = could not find container \"b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262\": container with ID starting with b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262 not found: ID does not exist" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.421284 4784 scope.go:117] "RemoveContainer" containerID="c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.421814 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966"} err="failed to get container status \"c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966\": rpc error: code = NotFound desc = could not find container \"c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966\": container with ID starting with c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966 not found: ID does not exist" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.421913 4784 scope.go:117] "RemoveContainer" containerID="9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.422349 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a"} err="failed to get container status \"9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\": rpc error: code = NotFound desc = could not find container \"9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\": container with ID starting with 9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a not found: ID does not exist" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.422451 4784 scope.go:117] "RemoveContainer" containerID="e727e38f6d5f43540b18e918bc7541698e176628300db91c6c66b395bc5e5ad4" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.422768 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e727e38f6d5f43540b18e918bc7541698e176628300db91c6c66b395bc5e5ad4"} err="failed to get container status \"e727e38f6d5f43540b18e918bc7541698e176628300db91c6c66b395bc5e5ad4\": rpc error: code = NotFound desc = could not find container \"e727e38f6d5f43540b18e918bc7541698e176628300db91c6c66b395bc5e5ad4\": container with ID starting with e727e38f6d5f43540b18e918bc7541698e176628300db91c6c66b395bc5e5ad4 not found: ID does not exist" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.422792 4784 scope.go:117] "RemoveContainer" containerID="e1b0328193847e770f1dc35e11b6fa43f0f7a15326972cda4b409ca656d30566" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.423395 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1b0328193847e770f1dc35e11b6fa43f0f7a15326972cda4b409ca656d30566"} err="failed to get container status \"e1b0328193847e770f1dc35e11b6fa43f0f7a15326972cda4b409ca656d30566\": rpc error: code = NotFound desc = could not find container \"e1b0328193847e770f1dc35e11b6fa43f0f7a15326972cda4b409ca656d30566\": container with ID 
starting with e1b0328193847e770f1dc35e11b6fa43f0f7a15326972cda4b409ca656d30566 not found: ID does not exist" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.423421 4784 scope.go:117] "RemoveContainer" containerID="4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.423766 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619"} err="failed to get container status \"4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619\": rpc error: code = NotFound desc = could not find container \"4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619\": container with ID starting with 4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619 not found: ID does not exist" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.423849 4784 scope.go:117] "RemoveContainer" containerID="77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.424366 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd"} err="failed to get container status \"77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd\": rpc error: code = NotFound desc = could not find container \"77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd\": container with ID starting with 77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd not found: ID does not exist" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.424415 4784 scope.go:117] "RemoveContainer" containerID="fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.424624 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d"} err="failed to get container status \"fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d\": rpc error: code = NotFound desc = could not find container \"fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d\": container with ID starting with fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d not found: ID does not exist" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.424646 4784 scope.go:117] "RemoveContainer" containerID="6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.424926 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd"} err="failed to get container status \"6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd\": rpc error: code = NotFound desc = could not find container \"6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd\": container with ID starting with 6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd not found: ID does not exist" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.425016 4784 scope.go:117] "RemoveContainer" containerID="3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.425375 4784 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a"} err="failed to get container status \"3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a\": rpc error: code = NotFound desc = could not find container \"3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a\": container with ID starting with 3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a not found: ID does not exist" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.425399 4784 scope.go:117] "RemoveContainer" containerID="b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.425762 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262"} err="failed to get container status \"b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262\": rpc error: code = NotFound desc = could not find container \"b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262\": container with ID starting with b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262 not found: ID does not exist" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.425790 4784 scope.go:117] "RemoveContainer" containerID="c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.426042 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966"} err="failed to get container status \"c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966\": rpc error: code = NotFound desc = could not find container \"c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966\": container with ID starting with c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966 not found: ID does not exist" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.426094 4784 scope.go:117] "RemoveContainer" containerID="9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.426453 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a"} err="failed to get container status \"9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\": rpc error: code = NotFound desc = could not find container \"9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\": container with ID starting with 9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a not found: ID does not exist" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.426477 4784 scope.go:117] "RemoveContainer" containerID="e727e38f6d5f43540b18e918bc7541698e176628300db91c6c66b395bc5e5ad4" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.426806 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e727e38f6d5f43540b18e918bc7541698e176628300db91c6c66b395bc5e5ad4"} err="failed to get container status \"e727e38f6d5f43540b18e918bc7541698e176628300db91c6c66b395bc5e5ad4\": rpc error: code = NotFound desc = could not find container \"e727e38f6d5f43540b18e918bc7541698e176628300db91c6c66b395bc5e5ad4\": container with ID starting with e727e38f6d5f43540b18e918bc7541698e176628300db91c6c66b395bc5e5ad4 not found: ID does not exist" Dec 
05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.426856 4784 scope.go:117] "RemoveContainer" containerID="e1b0328193847e770f1dc35e11b6fa43f0f7a15326972cda4b409ca656d30566" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.427081 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1b0328193847e770f1dc35e11b6fa43f0f7a15326972cda4b409ca656d30566"} err="failed to get container status \"e1b0328193847e770f1dc35e11b6fa43f0f7a15326972cda4b409ca656d30566\": rpc error: code = NotFound desc = could not find container \"e1b0328193847e770f1dc35e11b6fa43f0f7a15326972cda4b409ca656d30566\": container with ID starting with e1b0328193847e770f1dc35e11b6fa43f0f7a15326972cda4b409ca656d30566 not found: ID does not exist" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.427104 4784 scope.go:117] "RemoveContainer" containerID="4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.427377 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619"} err="failed to get container status \"4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619\": rpc error: code = NotFound desc = could not find container \"4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619\": container with ID starting with 4966535e1113a44034766b166050425960ad99647f9bbd30d1e2079396da1619 not found: ID does not exist" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.427400 4784 scope.go:117] "RemoveContainer" containerID="77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.427626 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd"} err="failed to get container status \"77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd\": rpc error: code = NotFound desc = could not find container \"77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd\": container with ID starting with 77241dcac873dbef08ffddacf1c69d2905e572c38b3babe4a3d214eb66fccfbd not found: ID does not exist" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.427649 4784 scope.go:117] "RemoveContainer" containerID="fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.427895 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d"} err="failed to get container status \"fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d\": rpc error: code = NotFound desc = could not find container \"fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d\": container with ID starting with fcda1b7f618c31fc6f0f40c0c1c68c243ea0289817310c8c09053b393f7a094d not found: ID does not exist" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.427917 4784 scope.go:117] "RemoveContainer" containerID="6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.428150 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd"} err="failed to get container status 
\"6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd\": rpc error: code = NotFound desc = could not find container \"6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd\": container with ID starting with 6c47f7445ffaf16de59cedb8003a03faafd4c080f4a99d74e6fec3208ffc1ccd not found: ID does not exist" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.428167 4784 scope.go:117] "RemoveContainer" containerID="3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.428458 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a"} err="failed to get container status \"3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a\": rpc error: code = NotFound desc = could not find container \"3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a\": container with ID starting with 3c41d684797aa06585e4cc39d05fd56e8044455f43d2df42e1b3a35bf28c269a not found: ID does not exist" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.428478 4784 scope.go:117] "RemoveContainer" containerID="b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.428742 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262"} err="failed to get container status \"b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262\": rpc error: code = NotFound desc = could not find container \"b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262\": container with ID starting with b404d8cd792c7f8df4eac30ece48152d7a58adab402a5d80b268886149bd7262 not found: ID does not exist" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.428763 4784 scope.go:117] "RemoveContainer" containerID="c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.429158 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966"} err="failed to get container status \"c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966\": rpc error: code = NotFound desc = could not find container \"c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966\": container with ID starting with c86ad4ebfad869da0e66c5fe07e6ac4327c54668351c90a54791d59fc4157966 not found: ID does not exist" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.429177 4784 scope.go:117] "RemoveContainer" containerID="9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a" Dec 05 12:36:04 crc kubenswrapper[4784]: I1205 12:36:04.429499 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a"} err="failed to get container status \"9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\": rpc error: code = NotFound desc = could not find container \"9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a\": container with ID starting with 9b89cf13110bb81c70b2b34ba88d01dfeb0490fbeb7b711ff8e2465626a2804a not found: ID does not exist" Dec 05 12:36:05 crc kubenswrapper[4784]: I1205 12:36:05.012456 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="291f2a35-7dd5-4af9-87f0-caae4ef75c66" path="/var/lib/kubelet/pods/291f2a35-7dd5-4af9-87f0-caae4ef75c66/volumes" Dec 05 12:36:05 crc kubenswrapper[4784]: I1205 12:36:05.167157 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" event={"ID":"72b3563e-7682-432a-b3dc-5dedc6e10110","Type":"ContainerStarted","Data":"aa193342158de95f2b1133fbb025e5e1f5d669282a978b28ee230ebd4f15852b"} Dec 05 12:36:05 crc kubenswrapper[4784]: I1205 12:36:05.167249 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" event={"ID":"72b3563e-7682-432a-b3dc-5dedc6e10110","Type":"ContainerStarted","Data":"241a8ab3f31dd6ed89d89d103d3b9ef4aca764a1942ce77b5b5869d8c83d1237"} Dec 05 12:36:05 crc kubenswrapper[4784]: I1205 12:36:05.167271 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" event={"ID":"72b3563e-7682-432a-b3dc-5dedc6e10110","Type":"ContainerStarted","Data":"8d06d0e5b20406c6d6428e23298a940d5d72c0c9fed33cf85eec5a83c9a62262"} Dec 05 12:36:05 crc kubenswrapper[4784]: I1205 12:36:05.167288 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" event={"ID":"72b3563e-7682-432a-b3dc-5dedc6e10110","Type":"ContainerStarted","Data":"38d4e77a3e1d74e84b9f9f261efd042f6fbaa6a0eebb53e650f38919a43fca86"} Dec 05 12:36:05 crc kubenswrapper[4784]: I1205 12:36:05.167304 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" event={"ID":"72b3563e-7682-432a-b3dc-5dedc6e10110","Type":"ContainerStarted","Data":"e5c37639e79528aa7b01022992cc35794f8cdda4d6ce0adcc2ceeb58af3efa73"} Dec 05 12:36:05 crc kubenswrapper[4784]: I1205 12:36:05.167319 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" event={"ID":"72b3563e-7682-432a-b3dc-5dedc6e10110","Type":"ContainerStarted","Data":"dbff310685e8534fee6529259735345544c43c9e306ad53c4263c83bfd1ad7ea"} Dec 05 12:36:07 crc kubenswrapper[4784]: I1205 12:36:07.186113 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" event={"ID":"72b3563e-7682-432a-b3dc-5dedc6e10110","Type":"ContainerStarted","Data":"997fc7414c2d8bf5471c2f10960dbef53ba48b839c454db5d56608fa51677a1e"} Dec 05 12:36:10 crc kubenswrapper[4784]: I1205 12:36:10.214624 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" event={"ID":"72b3563e-7682-432a-b3dc-5dedc6e10110","Type":"ContainerStarted","Data":"f10cf57e5ca1257f52e504bffc53f11888ad3a206b0fae40b9dff8d2b76279f7"} Dec 05 12:36:10 crc kubenswrapper[4784]: I1205 12:36:10.215167 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:10 crc kubenswrapper[4784]: I1205 12:36:10.215178 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:10 crc kubenswrapper[4784]: I1205 12:36:10.215200 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:10 crc kubenswrapper[4784]: I1205 12:36:10.243011 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:10 crc kubenswrapper[4784]: I1205 12:36:10.250580 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:10 crc kubenswrapper[4784]: I1205 12:36:10.258495 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" podStartSLOduration=7.258465333 podStartE2EDuration="7.258465333s" podCreationTimestamp="2025-12-05 12:36:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:36:10.252820364 +0000 UTC m=+649.672887179" watchObservedRunningTime="2025-12-05 12:36:10.258465333 +0000 UTC m=+649.678532158" Dec 05 12:36:13 crc kubenswrapper[4784]: I1205 12:36:13.998717 4784 scope.go:117] "RemoveContainer" containerID="edb0209f26abd1d599a14ecc0e1ca91e488e5f887ebf0f77ab6a5df65eb5dde4" Dec 05 12:36:14 crc kubenswrapper[4784]: E1205 12:36:14.000997 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-g5gv5_openshift-multus(759cb09f-42c3-4254-82f8-b5285b61012a)\"" pod="openshift-multus/multus-g5gv5" podUID="759cb09f-42c3-4254-82f8-b5285b61012a" Dec 05 12:36:28 crc kubenswrapper[4784]: I1205 12:36:27.999126 4784 scope.go:117] "RemoveContainer" containerID="edb0209f26abd1d599a14ecc0e1ca91e488e5f887ebf0f77ab6a5df65eb5dde4" Dec 05 12:36:28 crc kubenswrapper[4784]: I1205 12:36:28.324487 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-g5gv5_759cb09f-42c3-4254-82f8-b5285b61012a/kube-multus/2.log" Dec 05 12:36:28 crc kubenswrapper[4784]: I1205 12:36:28.324874 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-g5gv5" event={"ID":"759cb09f-42c3-4254-82f8-b5285b61012a","Type":"ContainerStarted","Data":"064d0d0eff278601a0db8b20b45de807b540b809ea4eb84367a71273ea428e64"} Dec 05 12:36:31 crc kubenswrapper[4784]: I1205 12:36:31.947451 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104qz4c"] Dec 05 12:36:31 crc kubenswrapper[4784]: I1205 12:36:31.953786 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104qz4c" Dec 05 12:36:31 crc kubenswrapper[4784]: I1205 12:36:31.959769 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 05 12:36:31 crc kubenswrapper[4784]: I1205 12:36:31.966415 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104qz4c"] Dec 05 12:36:32 crc kubenswrapper[4784]: I1205 12:36:32.013016 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/06a04def-1da6-41fc-9aa1-9a6d5a2dcafb-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104qz4c\" (UID: \"06a04def-1da6-41fc-9aa1-9a6d5a2dcafb\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104qz4c" Dec 05 12:36:32 crc kubenswrapper[4784]: I1205 12:36:32.013245 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wztcd\" (UniqueName: \"kubernetes.io/projected/06a04def-1da6-41fc-9aa1-9a6d5a2dcafb-kube-api-access-wztcd\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104qz4c\" (UID: \"06a04def-1da6-41fc-9aa1-9a6d5a2dcafb\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104qz4c" Dec 05 12:36:32 crc kubenswrapper[4784]: I1205 12:36:32.013321 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/06a04def-1da6-41fc-9aa1-9a6d5a2dcafb-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104qz4c\" (UID: \"06a04def-1da6-41fc-9aa1-9a6d5a2dcafb\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104qz4c" Dec 05 12:36:32 crc kubenswrapper[4784]: I1205 12:36:32.115023 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/06a04def-1da6-41fc-9aa1-9a6d5a2dcafb-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104qz4c\" (UID: \"06a04def-1da6-41fc-9aa1-9a6d5a2dcafb\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104qz4c" Dec 05 12:36:32 crc kubenswrapper[4784]: I1205 12:36:32.115109 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wztcd\" (UniqueName: \"kubernetes.io/projected/06a04def-1da6-41fc-9aa1-9a6d5a2dcafb-kube-api-access-wztcd\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104qz4c\" (UID: \"06a04def-1da6-41fc-9aa1-9a6d5a2dcafb\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104qz4c" Dec 05 12:36:32 crc kubenswrapper[4784]: I1205 12:36:32.115139 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/06a04def-1da6-41fc-9aa1-9a6d5a2dcafb-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104qz4c\" (UID: \"06a04def-1da6-41fc-9aa1-9a6d5a2dcafb\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104qz4c" Dec 05 12:36:32 crc kubenswrapper[4784]: I1205 12:36:32.115780 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/06a04def-1da6-41fc-9aa1-9a6d5a2dcafb-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104qz4c\" (UID: \"06a04def-1da6-41fc-9aa1-9a6d5a2dcafb\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104qz4c" Dec 05 12:36:32 crc kubenswrapper[4784]: I1205 12:36:32.115784 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/06a04def-1da6-41fc-9aa1-9a6d5a2dcafb-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104qz4c\" (UID: \"06a04def-1da6-41fc-9aa1-9a6d5a2dcafb\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104qz4c" Dec 05 12:36:32 crc kubenswrapper[4784]: I1205 12:36:32.139161 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wztcd\" (UniqueName: \"kubernetes.io/projected/06a04def-1da6-41fc-9aa1-9a6d5a2dcafb-kube-api-access-wztcd\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104qz4c\" (UID: \"06a04def-1da6-41fc-9aa1-9a6d5a2dcafb\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104qz4c" Dec 05 12:36:32 crc kubenswrapper[4784]: I1205 12:36:32.271427 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104qz4c" Dec 05 12:36:32 crc kubenswrapper[4784]: I1205 12:36:32.454672 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104qz4c"] Dec 05 12:36:33 crc kubenswrapper[4784]: I1205 12:36:33.358590 4784 generic.go:334] "Generic (PLEG): container finished" podID="06a04def-1da6-41fc-9aa1-9a6d5a2dcafb" containerID="93eebd545ccea9f30abd962b80437dd0b2054f245a856bebc6afbac3f5a1874f" exitCode=0 Dec 05 12:36:33 crc kubenswrapper[4784]: I1205 12:36:33.358704 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104qz4c" event={"ID":"06a04def-1da6-41fc-9aa1-9a6d5a2dcafb","Type":"ContainerDied","Data":"93eebd545ccea9f30abd962b80437dd0b2054f245a856bebc6afbac3f5a1874f"} Dec 05 12:36:33 crc kubenswrapper[4784]: I1205 12:36:33.359002 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104qz4c" event={"ID":"06a04def-1da6-41fc-9aa1-9a6d5a2dcafb","Type":"ContainerStarted","Data":"8bbb5d26382416cf2e6fa5e3f10f87cf007933db3fe2c5097fe43facbb623f47"} Dec 05 12:36:33 crc kubenswrapper[4784]: I1205 12:36:33.681861 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-hm2td" Dec 05 12:36:36 crc kubenswrapper[4784]: I1205 12:36:36.376530 4784 generic.go:334] "Generic (PLEG): container finished" podID="06a04def-1da6-41fc-9aa1-9a6d5a2dcafb" containerID="fad90a58f7304d825df015cb83ef42a22b10898927468cdabc3171317aa8df76" exitCode=0 Dec 05 12:36:36 crc kubenswrapper[4784]: I1205 12:36:36.376619 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104qz4c" event={"ID":"06a04def-1da6-41fc-9aa1-9a6d5a2dcafb","Type":"ContainerDied","Data":"fad90a58f7304d825df015cb83ef42a22b10898927468cdabc3171317aa8df76"} Dec 05 12:36:37 crc kubenswrapper[4784]: I1205 12:36:37.385514 4784 generic.go:334] "Generic (PLEG): container finished" 
podID="06a04def-1da6-41fc-9aa1-9a6d5a2dcafb" containerID="bbdfe21e6cbb627ba16238d8eed5320a3876a55009e738d7758bfb31fe174669" exitCode=0 Dec 05 12:36:37 crc kubenswrapper[4784]: I1205 12:36:37.385559 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104qz4c" event={"ID":"06a04def-1da6-41fc-9aa1-9a6d5a2dcafb","Type":"ContainerDied","Data":"bbdfe21e6cbb627ba16238d8eed5320a3876a55009e738d7758bfb31fe174669"} Dec 05 12:36:38 crc kubenswrapper[4784]: I1205 12:36:38.688252 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104qz4c" Dec 05 12:36:38 crc kubenswrapper[4784]: I1205 12:36:38.797493 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/06a04def-1da6-41fc-9aa1-9a6d5a2dcafb-bundle\") pod \"06a04def-1da6-41fc-9aa1-9a6d5a2dcafb\" (UID: \"06a04def-1da6-41fc-9aa1-9a6d5a2dcafb\") " Dec 05 12:36:38 crc kubenswrapper[4784]: I1205 12:36:38.797746 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wztcd\" (UniqueName: \"kubernetes.io/projected/06a04def-1da6-41fc-9aa1-9a6d5a2dcafb-kube-api-access-wztcd\") pod \"06a04def-1da6-41fc-9aa1-9a6d5a2dcafb\" (UID: \"06a04def-1da6-41fc-9aa1-9a6d5a2dcafb\") " Dec 05 12:36:38 crc kubenswrapper[4784]: I1205 12:36:38.797845 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/06a04def-1da6-41fc-9aa1-9a6d5a2dcafb-util\") pod \"06a04def-1da6-41fc-9aa1-9a6d5a2dcafb\" (UID: \"06a04def-1da6-41fc-9aa1-9a6d5a2dcafb\") " Dec 05 12:36:38 crc kubenswrapper[4784]: I1205 12:36:38.802267 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/06a04def-1da6-41fc-9aa1-9a6d5a2dcafb-bundle" (OuterVolumeSpecName: "bundle") pod "06a04def-1da6-41fc-9aa1-9a6d5a2dcafb" (UID: "06a04def-1da6-41fc-9aa1-9a6d5a2dcafb"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:36:38 crc kubenswrapper[4784]: I1205 12:36:38.807788 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06a04def-1da6-41fc-9aa1-9a6d5a2dcafb-kube-api-access-wztcd" (OuterVolumeSpecName: "kube-api-access-wztcd") pod "06a04def-1da6-41fc-9aa1-9a6d5a2dcafb" (UID: "06a04def-1da6-41fc-9aa1-9a6d5a2dcafb"). InnerVolumeSpecName "kube-api-access-wztcd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:36:38 crc kubenswrapper[4784]: I1205 12:36:38.824432 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/06a04def-1da6-41fc-9aa1-9a6d5a2dcafb-util" (OuterVolumeSpecName: "util") pod "06a04def-1da6-41fc-9aa1-9a6d5a2dcafb" (UID: "06a04def-1da6-41fc-9aa1-9a6d5a2dcafb"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:36:38 crc kubenswrapper[4784]: I1205 12:36:38.899712 4784 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/06a04def-1da6-41fc-9aa1-9a6d5a2dcafb-util\") on node \"crc\" DevicePath \"\"" Dec 05 12:36:38 crc kubenswrapper[4784]: I1205 12:36:38.899750 4784 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/06a04def-1da6-41fc-9aa1-9a6d5a2dcafb-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:36:38 crc kubenswrapper[4784]: I1205 12:36:38.899762 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wztcd\" (UniqueName: \"kubernetes.io/projected/06a04def-1da6-41fc-9aa1-9a6d5a2dcafb-kube-api-access-wztcd\") on node \"crc\" DevicePath \"\"" Dec 05 12:36:39 crc kubenswrapper[4784]: I1205 12:36:39.405696 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104qz4c" event={"ID":"06a04def-1da6-41fc-9aa1-9a6d5a2dcafb","Type":"ContainerDied","Data":"8bbb5d26382416cf2e6fa5e3f10f87cf007933db3fe2c5097fe43facbb623f47"} Dec 05 12:36:39 crc kubenswrapper[4784]: I1205 12:36:39.406093 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8bbb5d26382416cf2e6fa5e3f10f87cf007933db3fe2c5097fe43facbb623f47" Dec 05 12:36:39 crc kubenswrapper[4784]: I1205 12:36:39.405798 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104qz4c" Dec 05 12:36:49 crc kubenswrapper[4784]: I1205 12:36:49.642705 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-q5lxp"] Dec 05 12:36:49 crc kubenswrapper[4784]: E1205 12:36:49.644578 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06a04def-1da6-41fc-9aa1-9a6d5a2dcafb" containerName="extract" Dec 05 12:36:49 crc kubenswrapper[4784]: I1205 12:36:49.644689 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="06a04def-1da6-41fc-9aa1-9a6d5a2dcafb" containerName="extract" Dec 05 12:36:49 crc kubenswrapper[4784]: E1205 12:36:49.644782 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06a04def-1da6-41fc-9aa1-9a6d5a2dcafb" containerName="pull" Dec 05 12:36:49 crc kubenswrapper[4784]: I1205 12:36:49.644841 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="06a04def-1da6-41fc-9aa1-9a6d5a2dcafb" containerName="pull" Dec 05 12:36:49 crc kubenswrapper[4784]: E1205 12:36:49.644907 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06a04def-1da6-41fc-9aa1-9a6d5a2dcafb" containerName="util" Dec 05 12:36:49 crc kubenswrapper[4784]: I1205 12:36:49.644985 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="06a04def-1da6-41fc-9aa1-9a6d5a2dcafb" containerName="util" Dec 05 12:36:49 crc kubenswrapper[4784]: I1205 12:36:49.645164 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="06a04def-1da6-41fc-9aa1-9a6d5a2dcafb" containerName="extract" Dec 05 12:36:49 crc kubenswrapper[4784]: I1205 12:36:49.645757 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-q5lxp" Dec 05 12:36:49 crc kubenswrapper[4784]: I1205 12:36:49.648611 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Dec 05 12:36:49 crc kubenswrapper[4784]: I1205 12:36:49.658629 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-q5lxp"] Dec 05 12:36:49 crc kubenswrapper[4784]: I1205 12:36:49.662614 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-8nzrb" Dec 05 12:36:49 crc kubenswrapper[4784]: I1205 12:36:49.663112 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Dec 05 12:36:49 crc kubenswrapper[4784]: I1205 12:36:49.738525 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dbvr5\" (UniqueName: \"kubernetes.io/projected/954cb856-d909-4541-89c7-7c38bf8d8618-kube-api-access-dbvr5\") pod \"obo-prometheus-operator-668cf9dfbb-q5lxp\" (UID: \"954cb856-d909-4541-89c7-7c38bf8d8618\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-q5lxp" Dec 05 12:36:49 crc kubenswrapper[4784]: I1205 12:36:49.773127 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-848d96db67-6k7p8"] Dec 05 12:36:49 crc kubenswrapper[4784]: I1205 12:36:49.774220 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-848d96db67-6k7p8" Dec 05 12:36:49 crc kubenswrapper[4784]: I1205 12:36:49.776862 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-sks8f" Dec 05 12:36:49 crc kubenswrapper[4784]: I1205 12:36:49.777484 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Dec 05 12:36:49 crc kubenswrapper[4784]: I1205 12:36:49.789809 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-848d96db67-869qk"] Dec 05 12:36:49 crc kubenswrapper[4784]: I1205 12:36:49.790543 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-848d96db67-869qk" Dec 05 12:36:49 crc kubenswrapper[4784]: I1205 12:36:49.794721 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-848d96db67-6k7p8"] Dec 05 12:36:49 crc kubenswrapper[4784]: I1205 12:36:49.830977 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-848d96db67-869qk"] Dec 05 12:36:49 crc kubenswrapper[4784]: I1205 12:36:49.839851 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/573cb676-d704-4d0d-852c-582d38a64cdb-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-848d96db67-6k7p8\" (UID: \"573cb676-d704-4d0d-852c-582d38a64cdb\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-848d96db67-6k7p8" Dec 05 12:36:49 crc kubenswrapper[4784]: I1205 12:36:49.839894 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e6ba5df2-9910-453c-9993-fca6642b4e8e-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-848d96db67-869qk\" (UID: \"e6ba5df2-9910-453c-9993-fca6642b4e8e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-848d96db67-869qk" Dec 05 12:36:49 crc kubenswrapper[4784]: I1205 12:36:49.839930 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dbvr5\" (UniqueName: \"kubernetes.io/projected/954cb856-d909-4541-89c7-7c38bf8d8618-kube-api-access-dbvr5\") pod \"obo-prometheus-operator-668cf9dfbb-q5lxp\" (UID: \"954cb856-d909-4541-89c7-7c38bf8d8618\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-q5lxp" Dec 05 12:36:49 crc kubenswrapper[4784]: I1205 12:36:49.839958 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/573cb676-d704-4d0d-852c-582d38a64cdb-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-848d96db67-6k7p8\" (UID: \"573cb676-d704-4d0d-852c-582d38a64cdb\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-848d96db67-6k7p8" Dec 05 12:36:49 crc kubenswrapper[4784]: I1205 12:36:49.839977 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e6ba5df2-9910-453c-9993-fca6642b4e8e-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-848d96db67-869qk\" (UID: \"e6ba5df2-9910-453c-9993-fca6642b4e8e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-848d96db67-869qk" Dec 05 12:36:49 crc kubenswrapper[4784]: I1205 12:36:49.884452 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dbvr5\" (UniqueName: \"kubernetes.io/projected/954cb856-d909-4541-89c7-7c38bf8d8618-kube-api-access-dbvr5\") pod \"obo-prometheus-operator-668cf9dfbb-q5lxp\" (UID: \"954cb856-d909-4541-89c7-7c38bf8d8618\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-q5lxp" Dec 05 12:36:49 crc kubenswrapper[4784]: I1205 12:36:49.941522 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/573cb676-d704-4d0d-852c-582d38a64cdb-apiservice-cert\") pod 
\"obo-prometheus-operator-admission-webhook-848d96db67-6k7p8\" (UID: \"573cb676-d704-4d0d-852c-582d38a64cdb\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-848d96db67-6k7p8" Dec 05 12:36:49 crc kubenswrapper[4784]: I1205 12:36:49.941566 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e6ba5df2-9910-453c-9993-fca6642b4e8e-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-848d96db67-869qk\" (UID: \"e6ba5df2-9910-453c-9993-fca6642b4e8e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-848d96db67-869qk" Dec 05 12:36:49 crc kubenswrapper[4784]: I1205 12:36:49.941607 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/573cb676-d704-4d0d-852c-582d38a64cdb-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-848d96db67-6k7p8\" (UID: \"573cb676-d704-4d0d-852c-582d38a64cdb\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-848d96db67-6k7p8" Dec 05 12:36:49 crc kubenswrapper[4784]: I1205 12:36:49.941626 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e6ba5df2-9910-453c-9993-fca6642b4e8e-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-848d96db67-869qk\" (UID: \"e6ba5df2-9910-453c-9993-fca6642b4e8e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-848d96db67-869qk" Dec 05 12:36:49 crc kubenswrapper[4784]: I1205 12:36:49.945312 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/573cb676-d704-4d0d-852c-582d38a64cdb-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-848d96db67-6k7p8\" (UID: \"573cb676-d704-4d0d-852c-582d38a64cdb\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-848d96db67-6k7p8" Dec 05 12:36:49 crc kubenswrapper[4784]: I1205 12:36:49.945576 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e6ba5df2-9910-453c-9993-fca6642b4e8e-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-848d96db67-869qk\" (UID: \"e6ba5df2-9910-453c-9993-fca6642b4e8e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-848d96db67-869qk" Dec 05 12:36:49 crc kubenswrapper[4784]: I1205 12:36:49.945909 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e6ba5df2-9910-453c-9993-fca6642b4e8e-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-848d96db67-869qk\" (UID: \"e6ba5df2-9910-453c-9993-fca6642b4e8e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-848d96db67-869qk" Dec 05 12:36:49 crc kubenswrapper[4784]: I1205 12:36:49.954064 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/573cb676-d704-4d0d-852c-582d38a64cdb-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-848d96db67-6k7p8\" (UID: \"573cb676-d704-4d0d-852c-582d38a64cdb\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-848d96db67-6k7p8" Dec 05 12:36:49 crc kubenswrapper[4784]: I1205 12:36:49.964314 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-q5lxp" Dec 05 12:36:49 crc kubenswrapper[4784]: I1205 12:36:49.972731 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-m29qp"] Dec 05 12:36:49 crc kubenswrapper[4784]: I1205 12:36:49.973965 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-m29qp" Dec 05 12:36:49 crc kubenswrapper[4784]: I1205 12:36:49.976098 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-zb4x2" Dec 05 12:36:49 crc kubenswrapper[4784]: I1205 12:36:49.976681 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Dec 05 12:36:49 crc kubenswrapper[4784]: I1205 12:36:49.992295 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-m29qp"] Dec 05 12:36:50 crc kubenswrapper[4784]: I1205 12:36:50.044795 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/bdf43736-9e51-4d7d-8290-075b7f058f62-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-m29qp\" (UID: \"bdf43736-9e51-4d7d-8290-075b7f058f62\") " pod="openshift-operators/observability-operator-d8bb48f5d-m29qp" Dec 05 12:36:50 crc kubenswrapper[4784]: I1205 12:36:50.044949 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6pmg\" (UniqueName: \"kubernetes.io/projected/bdf43736-9e51-4d7d-8290-075b7f058f62-kube-api-access-x6pmg\") pod \"observability-operator-d8bb48f5d-m29qp\" (UID: \"bdf43736-9e51-4d7d-8290-075b7f058f62\") " pod="openshift-operators/observability-operator-d8bb48f5d-m29qp" Dec 05 12:36:50 crc kubenswrapper[4784]: I1205 12:36:50.097968 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-848d96db67-6k7p8" Dec 05 12:36:50 crc kubenswrapper[4784]: I1205 12:36:50.111136 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-848d96db67-869qk" Dec 05 12:36:50 crc kubenswrapper[4784]: I1205 12:36:50.146220 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6pmg\" (UniqueName: \"kubernetes.io/projected/bdf43736-9e51-4d7d-8290-075b7f058f62-kube-api-access-x6pmg\") pod \"observability-operator-d8bb48f5d-m29qp\" (UID: \"bdf43736-9e51-4d7d-8290-075b7f058f62\") " pod="openshift-operators/observability-operator-d8bb48f5d-m29qp" Dec 05 12:36:50 crc kubenswrapper[4784]: I1205 12:36:50.146324 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/bdf43736-9e51-4d7d-8290-075b7f058f62-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-m29qp\" (UID: \"bdf43736-9e51-4d7d-8290-075b7f058f62\") " pod="openshift-operators/observability-operator-d8bb48f5d-m29qp" Dec 05 12:36:50 crc kubenswrapper[4784]: I1205 12:36:50.157972 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/bdf43736-9e51-4d7d-8290-075b7f058f62-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-m29qp\" (UID: \"bdf43736-9e51-4d7d-8290-075b7f058f62\") " pod="openshift-operators/observability-operator-d8bb48f5d-m29qp" Dec 05 12:36:50 crc kubenswrapper[4784]: I1205 12:36:50.168009 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x6pmg\" (UniqueName: \"kubernetes.io/projected/bdf43736-9e51-4d7d-8290-075b7f058f62-kube-api-access-x6pmg\") pod \"observability-operator-d8bb48f5d-m29qp\" (UID: \"bdf43736-9e51-4d7d-8290-075b7f058f62\") " pod="openshift-operators/observability-operator-d8bb48f5d-m29qp" Dec 05 12:36:50 crc kubenswrapper[4784]: I1205 12:36:50.186940 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-5446b9c989-6j8bm"] Dec 05 12:36:50 crc kubenswrapper[4784]: I1205 12:36:50.187597 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-6j8bm" Dec 05 12:36:50 crc kubenswrapper[4784]: I1205 12:36:50.193464 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-88bq8" Dec 05 12:36:50 crc kubenswrapper[4784]: I1205 12:36:50.196776 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-6j8bm"] Dec 05 12:36:50 crc kubenswrapper[4784]: I1205 12:36:50.220629 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-q5lxp"] Dec 05 12:36:50 crc kubenswrapper[4784]: I1205 12:36:50.247179 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/a56fb787-b445-49b8-a50c-5cddf822fc68-openshift-service-ca\") pod \"perses-operator-5446b9c989-6j8bm\" (UID: \"a56fb787-b445-49b8-a50c-5cddf822fc68\") " pod="openshift-operators/perses-operator-5446b9c989-6j8bm" Dec 05 12:36:50 crc kubenswrapper[4784]: I1205 12:36:50.247271 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-69mdv\" (UniqueName: \"kubernetes.io/projected/a56fb787-b445-49b8-a50c-5cddf822fc68-kube-api-access-69mdv\") pod \"perses-operator-5446b9c989-6j8bm\" (UID: \"a56fb787-b445-49b8-a50c-5cddf822fc68\") " pod="openshift-operators/perses-operator-5446b9c989-6j8bm" Dec 05 12:36:50 crc kubenswrapper[4784]: W1205 12:36:50.248232 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod954cb856_d909_4541_89c7_7c38bf8d8618.slice/crio-a452a41320b7fb50d96ca51dfa64d6c9c8d9e971e1f9aa5e04e86d50d792cce3 WatchSource:0}: Error finding container a452a41320b7fb50d96ca51dfa64d6c9c8d9e971e1f9aa5e04e86d50d792cce3: Status 404 returned error can't find the container with id a452a41320b7fb50d96ca51dfa64d6c9c8d9e971e1f9aa5e04e86d50d792cce3 Dec 05 12:36:50 crc kubenswrapper[4784]: I1205 12:36:50.339241 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-m29qp" Dec 05 12:36:50 crc kubenswrapper[4784]: I1205 12:36:50.348418 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/a56fb787-b445-49b8-a50c-5cddf822fc68-openshift-service-ca\") pod \"perses-operator-5446b9c989-6j8bm\" (UID: \"a56fb787-b445-49b8-a50c-5cddf822fc68\") " pod="openshift-operators/perses-operator-5446b9c989-6j8bm" Dec 05 12:36:50 crc kubenswrapper[4784]: I1205 12:36:50.348513 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-69mdv\" (UniqueName: \"kubernetes.io/projected/a56fb787-b445-49b8-a50c-5cddf822fc68-kube-api-access-69mdv\") pod \"perses-operator-5446b9c989-6j8bm\" (UID: \"a56fb787-b445-49b8-a50c-5cddf822fc68\") " pod="openshift-operators/perses-operator-5446b9c989-6j8bm" Dec 05 12:36:50 crc kubenswrapper[4784]: I1205 12:36:50.349749 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/a56fb787-b445-49b8-a50c-5cddf822fc68-openshift-service-ca\") pod \"perses-operator-5446b9c989-6j8bm\" (UID: \"a56fb787-b445-49b8-a50c-5cddf822fc68\") " pod="openshift-operators/perses-operator-5446b9c989-6j8bm" Dec 05 12:36:50 crc kubenswrapper[4784]: I1205 12:36:50.364823 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-69mdv\" (UniqueName: \"kubernetes.io/projected/a56fb787-b445-49b8-a50c-5cddf822fc68-kube-api-access-69mdv\") pod \"perses-operator-5446b9c989-6j8bm\" (UID: \"a56fb787-b445-49b8-a50c-5cddf822fc68\") " pod="openshift-operators/perses-operator-5446b9c989-6j8bm" Dec 05 12:36:50 crc kubenswrapper[4784]: I1205 12:36:50.389508 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-848d96db67-6k7p8"] Dec 05 12:36:50 crc kubenswrapper[4784]: W1205 12:36:50.401374 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod573cb676_d704_4d0d_852c_582d38a64cdb.slice/crio-82bee79216efe3075d573e6f7d6b49fedda6ff51d7af9624df58965fcb1bf999 WatchSource:0}: Error finding container 82bee79216efe3075d573e6f7d6b49fedda6ff51d7af9624df58965fcb1bf999: Status 404 returned error can't find the container with id 82bee79216efe3075d573e6f7d6b49fedda6ff51d7af9624df58965fcb1bf999 Dec 05 12:36:50 crc kubenswrapper[4784]: I1205 12:36:50.461423 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-848d96db67-869qk"] Dec 05 12:36:50 crc kubenswrapper[4784]: I1205 12:36:50.475286 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-q5lxp" event={"ID":"954cb856-d909-4541-89c7-7c38bf8d8618","Type":"ContainerStarted","Data":"a452a41320b7fb50d96ca51dfa64d6c9c8d9e971e1f9aa5e04e86d50d792cce3"} Dec 05 12:36:50 crc kubenswrapper[4784]: I1205 12:36:50.476523 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-848d96db67-6k7p8" event={"ID":"573cb676-d704-4d0d-852c-582d38a64cdb","Type":"ContainerStarted","Data":"82bee79216efe3075d573e6f7d6b49fedda6ff51d7af9624df58965fcb1bf999"} Dec 05 12:36:50 crc kubenswrapper[4784]: I1205 12:36:50.516531 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-6j8bm" Dec 05 12:36:50 crc kubenswrapper[4784]: I1205 12:36:50.656571 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-m29qp"] Dec 05 12:36:50 crc kubenswrapper[4784]: I1205 12:36:50.807648 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-6j8bm"] Dec 05 12:36:50 crc kubenswrapper[4784]: W1205 12:36:50.814294 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda56fb787_b445_49b8_a50c_5cddf822fc68.slice/crio-355fad9e259515c25bc2221c9c7547b64a03bc94f75f139f7e3a3068fe61e6a1 WatchSource:0}: Error finding container 355fad9e259515c25bc2221c9c7547b64a03bc94f75f139f7e3a3068fe61e6a1: Status 404 returned error can't find the container with id 355fad9e259515c25bc2221c9c7547b64a03bc94f75f139f7e3a3068fe61e6a1 Dec 05 12:36:51 crc kubenswrapper[4784]: I1205 12:36:51.482491 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-848d96db67-869qk" event={"ID":"e6ba5df2-9910-453c-9993-fca6642b4e8e","Type":"ContainerStarted","Data":"be4fc98011ec05446331809aaab9ea32bd98235145e687fbd44a016a802c83ed"} Dec 05 12:36:51 crc kubenswrapper[4784]: I1205 12:36:51.493640 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-m29qp" event={"ID":"bdf43736-9e51-4d7d-8290-075b7f058f62","Type":"ContainerStarted","Data":"a468edc23de625856ef1807b2c6f731646b5c2ae200a129cf14e2e836eea4ebf"} Dec 05 12:36:51 crc kubenswrapper[4784]: I1205 12:36:51.495105 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-6j8bm" event={"ID":"a56fb787-b445-49b8-a50c-5cddf822fc68","Type":"ContainerStarted","Data":"355fad9e259515c25bc2221c9c7547b64a03bc94f75f139f7e3a3068fe61e6a1"} Dec 05 12:36:59 crc kubenswrapper[4784]: I1205 12:36:59.572629 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:36:59 crc kubenswrapper[4784]: I1205 12:36:59.573128 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:37:05 crc kubenswrapper[4784]: E1205 12:37:05.045209 4784 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec" Dec 05 12:37:05 crc kubenswrapper[4784]: E1205 12:37:05.046468 4784 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:prometheus-operator-admission-webhook,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec,Command:[],Args:[--web.enable-tls=true --web.cert-file=/tmp/k8s-webhook-server/serving-certs/tls.crt --web.key-file=/tmp/k8s-webhook-server/serving-certs/tls.key],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{209715200 0} {} BinarySI},},Requests:ResourceList{cpu: {{50 -3} {} 50m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:apiservice-cert,ReadOnly:false,MountPath:/apiserver.local.config/certificates,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:webhook-cert,ReadOnly:false,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod obo-prometheus-operator-admission-webhook-848d96db67-869qk_openshift-operators(e6ba5df2-9910-453c-9993-fca6642b4e8e): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 12:37:05 crc kubenswrapper[4784]: E1205 12:37:05.047664 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator-admission-webhook\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-848d96db67-869qk" podUID="e6ba5df2-9910-453c-9993-fca6642b4e8e" Dec 05 12:37:07 crc kubenswrapper[4784]: I1205 12:37:07.345426 4784 patch_prober.go:28] interesting pod/openshift-kube-scheduler-crc container/kube-scheduler namespace/openshift-kube-scheduler: Readiness probe status=failure output="Get \"https://192.168.126.11:10259/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 05 12:37:07 crc kubenswrapper[4784]: I1205 12:37:07.346012 4784 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podUID="3dcd261975c3d6b9a6ad6367fd4facd3" containerName="kube-scheduler" probeResult="failure" output="Get \"https://192.168.126.11:10259/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 05 12:37:08 crc kubenswrapper[4784]: I1205 12:37:08.533979 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-q5lxp" event={"ID":"954cb856-d909-4541-89c7-7c38bf8d8618","Type":"ContainerStarted","Data":"c7dbd2a7a495ca3002f41962d1b4f98bff83d3d2defb0c0d00e8c90915a5c6eb"} Dec 05 12:37:08 crc kubenswrapper[4784]: I1205 12:37:08.536816 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-6j8bm" event={"ID":"a56fb787-b445-49b8-a50c-5cddf822fc68","Type":"ContainerStarted","Data":"e3ccf373e9ce7e7fbcb83f80830e6cc84210fb336e9b5748fb3ebc626cb8f401"} Dec 05 12:37:08 crc kubenswrapper[4784]: I1205 12:37:08.536910 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-5446b9c989-6j8bm" Dec 05 12:37:08 crc kubenswrapper[4784]: I1205 12:37:08.538482 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-848d96db67-869qk" event={"ID":"e6ba5df2-9910-453c-9993-fca6642b4e8e","Type":"ContainerStarted","Data":"3b77d5710f6354e3dc373d390aa34b37235e600c5f0d81aeb10911b3bf9f15ef"} Dec 05 12:37:08 crc kubenswrapper[4784]: I1205 12:37:08.540657 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-m29qp" event={"ID":"bdf43736-9e51-4d7d-8290-075b7f058f62","Type":"ContainerStarted","Data":"b4e1e96d0149dfdc8435ac70cb0b872c77b652cbb1d4704edd3c3d524f1632b7"} Dec 05 12:37:08 crc kubenswrapper[4784]: I1205 12:37:08.541612 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-d8bb48f5d-m29qp" Dec 05 12:37:08 crc kubenswrapper[4784]: I1205 12:37:08.544693 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-848d96db67-6k7p8" event={"ID":"573cb676-d704-4d0d-852c-582d38a64cdb","Type":"ContainerStarted","Data":"c6ba72843d53a259c3ebe69e19d4b3d8102b8fa5eef971a23dc69d4378d80ff1"} Dec 05 12:37:08 crc kubenswrapper[4784]: I1205 12:37:08.545512 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-d8bb48f5d-m29qp" Dec 05 12:37:08 crc kubenswrapper[4784]: I1205 12:37:08.564456 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-q5lxp" podStartSLOduration=4.690791722 podStartE2EDuration="19.564427729s" podCreationTimestamp="2025-12-05 12:36:49 +0000 UTC" firstStartedPulling="2025-12-05 12:36:50.263922595 +0000 UTC m=+689.683989400" lastFinishedPulling="2025-12-05 12:37:05.137558592 +0000 UTC m=+704.557625407" observedRunningTime="2025-12-05 12:37:08.559335778 +0000 UTC m=+707.979402613" watchObservedRunningTime="2025-12-05 12:37:08.564427729 +0000 UTC m=+707.984494544" Dec 05 12:37:08 crc kubenswrapper[4784]: I1205 12:37:08.604410 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-5446b9c989-6j8bm" podStartSLOduration=4.322156045 podStartE2EDuration="18.604381813s" podCreationTimestamp="2025-12-05 12:36:50 +0000 UTC" firstStartedPulling="2025-12-05 12:36:50.819915684 +0000 UTC m=+690.239982509" lastFinishedPulling="2025-12-05 12:37:05.102141462 +0000 UTC m=+704.522208277" observedRunningTime="2025-12-05 12:37:08.600002375 +0000 UTC m=+708.020069190" watchObservedRunningTime="2025-12-05 12:37:08.604381813 +0000 UTC m=+708.024448628" Dec 05 12:37:08 crc kubenswrapper[4784]: I1205 12:37:08.638716 4784 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-848d96db67-6k7p8" podStartSLOduration=4.946700658 podStartE2EDuration="19.638688829s" podCreationTimestamp="2025-12-05 12:36:49 +0000 UTC" firstStartedPulling="2025-12-05 12:36:50.407357053 +0000 UTC m=+689.827423868" lastFinishedPulling="2025-12-05 12:37:05.099345224 +0000 UTC m=+704.519412039" observedRunningTime="2025-12-05 12:37:08.633675051 +0000 UTC m=+708.053741866" watchObservedRunningTime="2025-12-05 12:37:08.638688829 +0000 UTC m=+708.058755644" Dec 05 12:37:08 crc kubenswrapper[4784]: I1205 12:37:08.683258 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-d8bb48f5d-m29qp" podStartSLOduration=5.202631063 podStartE2EDuration="19.683239188s" podCreationTimestamp="2025-12-05 12:36:49 +0000 UTC" firstStartedPulling="2025-12-05 12:36:50.666783669 +0000 UTC m=+690.086850484" lastFinishedPulling="2025-12-05 12:37:05.147391794 +0000 UTC m=+704.567458609" observedRunningTime="2025-12-05 12:37:08.681831083 +0000 UTC m=+708.101897898" watchObservedRunningTime="2025-12-05 12:37:08.683239188 +0000 UTC m=+708.103306003" Dec 05 12:37:20 crc kubenswrapper[4784]: I1205 12:37:20.520994 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-5446b9c989-6j8bm" Dec 05 12:37:20 crc kubenswrapper[4784]: I1205 12:37:20.549178 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-848d96db67-869qk" podStartSLOduration=-9223372005.305622 podStartE2EDuration="31.54915429s" podCreationTimestamp="2025-12-05 12:36:49 +0000 UTC" firstStartedPulling="2025-12-05 12:36:50.479391481 +0000 UTC m=+689.899458296" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:37:08.706488904 +0000 UTC m=+708.126555729" watchObservedRunningTime="2025-12-05 12:37:20.54915429 +0000 UTC m=+719.969221115" Dec 05 12:37:29 crc kubenswrapper[4784]: I1205 12:37:29.573167 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:37:29 crc kubenswrapper[4784]: I1205 12:37:29.573931 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:37:38 crc kubenswrapper[4784]: I1205 12:37:38.263508 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fr965w"] Dec 05 12:37:38 crc kubenswrapper[4784]: I1205 12:37:38.265490 4784 util.go:30] "No sandbox for pod can be found. 
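
The startup-latency entries reward a close read. podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration is consistent with subtracting the image-pull window (lastFinishedPulling minus firstStartedPulling) from that: for obo-prometheus-operator-668cf9dfbb-q5lxp, 19.564427729s minus the 14.873636007s pull window (taken from the monotonic m=+ offsets, 704.557625407 - 689.683989400) gives exactly the reported 4.690791722s, and for ovnkube-node-hm2td earlier both pull timestamps were the zero value, so SLO equaled E2E. The absurd -9223372005.305622s for ...-869qk is the same formula hitting Go's limits: lastFinishedPulling was never recorded for that pod (its first pull attempt was canceled), so it stayed at 0001-01-01, the pull window saturates at the minimum time.Duration (about -292 years), and subtracting that saturated minimum from a 31.5s E2E duration overflows int64 nanoseconds and wraps to roughly 31.549s minus 2^63 ns. A reproduction of the arithmetic using the log's own timestamps:

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        // Timestamps copied from the ...-869qk entry above.
        created := time.Date(2025, time.December, 5, 12, 36, 49, 0, time.UTC)
        watched := time.Date(2025, time.December, 5, 12, 37, 20, 549154290, time.UTC)
        firstStartedPulling := time.Date(2025, time.December, 5, 12, 36, 50, 479391481, time.UTC)
        var lastFinishedPulling time.Time // never set: stays at 0001-01-01

        e2e := watched.Sub(created)
        // Sub saturates at the minimum representable Duration here.
        pullWindow := lastFinishedPulling.Sub(firstStartedPulling)
        // Subtracting the saturated minimum overflows int64 nanoseconds
        // and wraps around to a huge negative duration.
        slo := e2e - pullWindow

        fmt.Printf("podStartE2EDuration=%v\n", e2e)             // 31.54915429s
        fmt.Printf("podStartSLOduration=%.6f\n", slo.Seconds()) // -9223372005.305622
    }
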
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fr965w"
Dec 05 12:37:38 crc kubenswrapper[4784]: I1205 12:37:38.269467 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Dec 05 12:37:38 crc kubenswrapper[4784]: I1205 12:37:38.276169 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fr965w"]
Dec 05 12:37:38 crc kubenswrapper[4784]: I1205 12:37:38.297221 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2zh2z\" (UniqueName: \"kubernetes.io/projected/1efb6a43-d637-4f19-8514-ee271c6aea44-kube-api-access-2zh2z\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fr965w\" (UID: \"1efb6a43-d637-4f19-8514-ee271c6aea44\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fr965w"
Dec 05 12:37:38 crc kubenswrapper[4784]: I1205 12:37:38.297323 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1efb6a43-d637-4f19-8514-ee271c6aea44-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fr965w\" (UID: \"1efb6a43-d637-4f19-8514-ee271c6aea44\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fr965w"
Dec 05 12:37:38 crc kubenswrapper[4784]: I1205 12:37:38.297353 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1efb6a43-d637-4f19-8514-ee271c6aea44-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fr965w\" (UID: \"1efb6a43-d637-4f19-8514-ee271c6aea44\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fr965w"
Dec 05 12:37:38 crc kubenswrapper[4784]: I1205 12:37:38.398644 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1efb6a43-d637-4f19-8514-ee271c6aea44-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fr965w\" (UID: \"1efb6a43-d637-4f19-8514-ee271c6aea44\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fr965w"
Dec 05 12:37:38 crc kubenswrapper[4784]: I1205 12:37:38.398714 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1efb6a43-d637-4f19-8514-ee271c6aea44-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fr965w\" (UID: \"1efb6a43-d637-4f19-8514-ee271c6aea44\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fr965w"
Dec 05 12:37:38 crc kubenswrapper[4784]: I1205 12:37:38.398747 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2zh2z\" (UniqueName: \"kubernetes.io/projected/1efb6a43-d637-4f19-8514-ee271c6aea44-kube-api-access-2zh2z\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fr965w\" (UID: \"1efb6a43-d637-4f19-8514-ee271c6aea44\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fr965w"
Dec 05 12:37:38 crc kubenswrapper[4784]: I1205 12:37:38.399385 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1efb6a43-d637-4f19-8514-ee271c6aea44-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fr965w\" (UID: \"1efb6a43-d637-4f19-8514-ee271c6aea44\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fr965w"
Dec 05 12:37:38 crc kubenswrapper[4784]: I1205 12:37:38.399481 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1efb6a43-d637-4f19-8514-ee271c6aea44-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fr965w\" (UID: \"1efb6a43-d637-4f19-8514-ee271c6aea44\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fr965w"
Dec 05 12:37:38 crc kubenswrapper[4784]: I1205 12:37:38.425451 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2zh2z\" (UniqueName: \"kubernetes.io/projected/1efb6a43-d637-4f19-8514-ee271c6aea44-kube-api-access-2zh2z\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fr965w\" (UID: \"1efb6a43-d637-4f19-8514-ee271c6aea44\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fr965w"
Dec 05 12:37:38 crc kubenswrapper[4784]: I1205 12:37:38.693593 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fr965w"
Dec 05 12:37:39 crc kubenswrapper[4784]: I1205 12:37:39.138206 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fr965w"]
Dec 05 12:37:39 crc kubenswrapper[4784]: I1205 12:37:39.723740 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fr965w" event={"ID":"1efb6a43-d637-4f19-8514-ee271c6aea44","Type":"ContainerStarted","Data":"9d84d3b5e8e17a7bed4ce73611b8d0da3c4edccc0be170b2c36f54ff8a917e3e"}
Dec 05 12:37:43 crc kubenswrapper[4784]: I1205 12:37:43.738406 4784 generic.go:334] "Generic (PLEG): container finished" podID="1efb6a43-d637-4f19-8514-ee271c6aea44" containerID="d0f17f2e0df41ea34bf0f716182775b681f43ee249c854167bf1056556d16c5e" exitCode=0
Dec 05 12:37:43 crc kubenswrapper[4784]: I1205 12:37:43.738532 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fr965w" event={"ID":"1efb6a43-d637-4f19-8514-ee271c6aea44","Type":"ContainerDied","Data":"d0f17f2e0df41ea34bf0f716182775b681f43ee249c854167bf1056556d16c5e"}
Dec 05 12:37:45 crc kubenswrapper[4784]: I1205 12:37:45.753251 4784 generic.go:334] "Generic (PLEG): container finished" podID="1efb6a43-d637-4f19-8514-ee271c6aea44" containerID="798c672c956887170e419d384e6d4a19e6945bd129b88b3f1ae2a1b029386d68" exitCode=0
Dec 05 12:37:45 crc kubenswrapper[4784]: I1205 12:37:45.753313 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fr965w" event={"ID":"1efb6a43-d637-4f19-8514-ee271c6aea44","Type":"ContainerDied","Data":"798c672c956887170e419d384e6d4a19e6945bd129b88b3f1ae2a1b029386d68"}
Dec 05 12:37:46 crc kubenswrapper[4784]: I1205 12:37:46.762899 4784 generic.go:334] "Generic (PLEG): container finished" podID="1efb6a43-d637-4f19-8514-ee271c6aea44" containerID="2f591b796e8ac4b3121046bdd4e106ed33f77820db3509d6db0f3e0674a8f50f" exitCode=0
Dec 05 12:37:46 crc kubenswrapper[4784]: I1205 12:37:46.763008 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fr965w" event={"ID":"1efb6a43-d637-4f19-8514-ee271c6aea44","Type":"ContainerDied","Data":"2f591b796e8ac4b3121046bdd4e106ed33f77820db3509d6db0f3e0674a8f50f"}
Dec 05 12:37:48 crc kubenswrapper[4784]: I1205 12:37:48.106980 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fr965w"
Dec 05 12:37:48 crc kubenswrapper[4784]: I1205 12:37:48.196331 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1efb6a43-d637-4f19-8514-ee271c6aea44-bundle\") pod \"1efb6a43-d637-4f19-8514-ee271c6aea44\" (UID: \"1efb6a43-d637-4f19-8514-ee271c6aea44\") "
Dec 05 12:37:48 crc kubenswrapper[4784]: I1205 12:37:48.196426 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1efb6a43-d637-4f19-8514-ee271c6aea44-util\") pod \"1efb6a43-d637-4f19-8514-ee271c6aea44\" (UID: \"1efb6a43-d637-4f19-8514-ee271c6aea44\") "
Dec 05 12:37:48 crc kubenswrapper[4784]: I1205 12:37:48.196503 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2zh2z\" (UniqueName: \"kubernetes.io/projected/1efb6a43-d637-4f19-8514-ee271c6aea44-kube-api-access-2zh2z\") pod \"1efb6a43-d637-4f19-8514-ee271c6aea44\" (UID: \"1efb6a43-d637-4f19-8514-ee271c6aea44\") "
Dec 05 12:37:48 crc kubenswrapper[4784]: I1205 12:37:48.197332 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1efb6a43-d637-4f19-8514-ee271c6aea44-bundle" (OuterVolumeSpecName: "bundle") pod "1efb6a43-d637-4f19-8514-ee271c6aea44" (UID: "1efb6a43-d637-4f19-8514-ee271c6aea44"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 12:37:48 crc kubenswrapper[4784]: I1205 12:37:48.204572 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1efb6a43-d637-4f19-8514-ee271c6aea44-kube-api-access-2zh2z" (OuterVolumeSpecName: "kube-api-access-2zh2z") pod "1efb6a43-d637-4f19-8514-ee271c6aea44" (UID: "1efb6a43-d637-4f19-8514-ee271c6aea44"). InnerVolumeSpecName "kube-api-access-2zh2z". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:37:48 crc kubenswrapper[4784]: I1205 12:37:48.215256 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1efb6a43-d637-4f19-8514-ee271c6aea44-util" (OuterVolumeSpecName: "util") pod "1efb6a43-d637-4f19-8514-ee271c6aea44" (UID: "1efb6a43-d637-4f19-8514-ee271c6aea44"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 12:37:48 crc kubenswrapper[4784]: I1205 12:37:48.298588 4784 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1efb6a43-d637-4f19-8514-ee271c6aea44-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 12:37:48 crc kubenswrapper[4784]: I1205 12:37:48.298642 4784 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1efb6a43-d637-4f19-8514-ee271c6aea44-util\") on node \"crc\" DevicePath \"\""
Dec 05 12:37:48 crc kubenswrapper[4784]: I1205 12:37:48.298663 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2zh2z\" (UniqueName: \"kubernetes.io/projected/1efb6a43-d637-4f19-8514-ee271c6aea44-kube-api-access-2zh2z\") on node \"crc\" DevicePath \"\""
Dec 05 12:37:48 crc kubenswrapper[4784]: I1205 12:37:48.779971 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fr965w" event={"ID":"1efb6a43-d637-4f19-8514-ee271c6aea44","Type":"ContainerDied","Data":"9d84d3b5e8e17a7bed4ce73611b8d0da3c4edccc0be170b2c36f54ff8a917e3e"}
Dec 05 12:37:48 crc kubenswrapper[4784]: I1205 12:37:48.780038 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9d84d3b5e8e17a7bed4ce73611b8d0da3c4edccc0be170b2c36f54ff8a917e3e"
Dec 05 12:37:48 crc kubenswrapper[4784]: I1205 12:37:48.780113 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fr965w"
Dec 05 12:37:54 crc kubenswrapper[4784]: I1205 12:37:54.871500 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-5zl2c"]
Dec 05 12:37:54 crc kubenswrapper[4784]: E1205 12:37:54.872207 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1efb6a43-d637-4f19-8514-ee271c6aea44" containerName="pull"
Dec 05 12:37:54 crc kubenswrapper[4784]: I1205 12:37:54.872219 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="1efb6a43-d637-4f19-8514-ee271c6aea44" containerName="pull"
Dec 05 12:37:54 crc kubenswrapper[4784]: E1205 12:37:54.872228 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1efb6a43-d637-4f19-8514-ee271c6aea44" containerName="util"
Dec 05 12:37:54 crc kubenswrapper[4784]: I1205 12:37:54.872234 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="1efb6a43-d637-4f19-8514-ee271c6aea44" containerName="util"
Dec 05 12:37:54 crc kubenswrapper[4784]: E1205 12:37:54.872249 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1efb6a43-d637-4f19-8514-ee271c6aea44" containerName="extract"
Dec 05 12:37:54 crc kubenswrapper[4784]: I1205 12:37:54.872255 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="1efb6a43-d637-4f19-8514-ee271c6aea44" containerName="extract"
Dec 05 12:37:54 crc kubenswrapper[4784]: I1205 12:37:54.872346 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="1efb6a43-d637-4f19-8514-ee271c6aea44" containerName="extract"
Dec 05 12:37:54 crc kubenswrapper[4784]: I1205 12:37:54.872715 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-5zl2c"
Dec 05 12:37:54 crc kubenswrapper[4784]: I1205 12:37:54.874529 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt"
Dec 05 12:37:54 crc kubenswrapper[4784]: I1205 12:37:54.874664 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt"
Dec 05 12:37:54 crc kubenswrapper[4784]: I1205 12:37:54.874605 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-zhk4l"
Dec 05 12:37:54 crc kubenswrapper[4784]: I1205 12:37:54.891502 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-5zl2c"]
Dec 05 12:37:54 crc kubenswrapper[4784]: I1205 12:37:54.983414 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qnt59\" (UniqueName: \"kubernetes.io/projected/200a7d52-d7cb-4b5e-91a8-d03a2f181b01-kube-api-access-qnt59\") pod \"nmstate-operator-5b5b58f5c8-5zl2c\" (UID: \"200a7d52-d7cb-4b5e-91a8-d03a2f181b01\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-5zl2c"
Dec 05 12:37:55 crc kubenswrapper[4784]: I1205 12:37:55.084753 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qnt59\" (UniqueName: \"kubernetes.io/projected/200a7d52-d7cb-4b5e-91a8-d03a2f181b01-kube-api-access-qnt59\") pod \"nmstate-operator-5b5b58f5c8-5zl2c\" (UID: \"200a7d52-d7cb-4b5e-91a8-d03a2f181b01\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-5zl2c"
Dec 05 12:37:55 crc kubenswrapper[4784]: I1205 12:37:55.114437 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qnt59\" (UniqueName: \"kubernetes.io/projected/200a7d52-d7cb-4b5e-91a8-d03a2f181b01-kube-api-access-qnt59\") pod \"nmstate-operator-5b5b58f5c8-5zl2c\" (UID: \"200a7d52-d7cb-4b5e-91a8-d03a2f181b01\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-5zl2c"
Dec 05 12:37:55 crc kubenswrapper[4784]: I1205 12:37:55.192105 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-5zl2c"
Dec 05 12:37:55 crc kubenswrapper[4784]: I1205 12:37:55.448206 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-5zl2c"]
Dec 05 12:37:55 crc kubenswrapper[4784]: I1205 12:37:55.821728 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-5zl2c" event={"ID":"200a7d52-d7cb-4b5e-91a8-d03a2f181b01","Type":"ContainerStarted","Data":"67a38b373abcdd45e38695a9f59ab63893bd09c1329f13dca162ffdd99dda906"}
Dec 05 12:37:57 crc kubenswrapper[4784]: I1205 12:37:57.834885 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-5zl2c" event={"ID":"200a7d52-d7cb-4b5e-91a8-d03a2f181b01","Type":"ContainerStarted","Data":"62f00a3c2104da29fcde394f33534455f76d1867af6080dbf6ab7a768c0bb659"}
Dec 05 12:37:57 crc kubenswrapper[4784]: I1205 12:37:57.852988 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-5zl2c" podStartSLOduration=1.7620736479999999 podStartE2EDuration="3.852972664s" podCreationTimestamp="2025-12-05 12:37:54 +0000 UTC" firstStartedPulling="2025-12-05 12:37:55.469069224 +0000 UTC m=+754.889136039" lastFinishedPulling="2025-12-05 12:37:57.55996824 +0000 UTC m=+756.980035055" observedRunningTime="2025-12-05 12:37:57.84789497 +0000 UTC m=+757.267961785" watchObservedRunningTime="2025-12-05 12:37:57.852972664 +0000 UTC m=+757.273039469"
Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.427554 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-96mtg"]
Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.430407 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-96mtg"
Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.444153 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-qb8kj"
Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.448418 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-skw5g"]
Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.472560 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-96mtg"]
Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.472595 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-c9l7g"]
Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.473763 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-skw5g"
Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.475030 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-skw5g"]
Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.475268 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-c9l7g"
Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.486917 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook"
Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.543453 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/081a21fa-325e-4018-9ce0-abc2bb1899ec-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-skw5g\" (UID: \"081a21fa-325e-4018-9ce0-abc2bb1899ec\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-skw5g"
Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.543494 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/c005bc77-26a9-4402-abb3-8c16e17afb69-ovs-socket\") pod \"nmstate-handler-c9l7g\" (UID: \"c005bc77-26a9-4402-abb3-8c16e17afb69\") " pod="openshift-nmstate/nmstate-handler-c9l7g"
Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.543532 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vcmp5\" (UniqueName: \"kubernetes.io/projected/c005bc77-26a9-4402-abb3-8c16e17afb69-kube-api-access-vcmp5\") pod \"nmstate-handler-c9l7g\" (UID: \"c005bc77-26a9-4402-abb3-8c16e17afb69\") " pod="openshift-nmstate/nmstate-handler-c9l7g"
Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.543556 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/c005bc77-26a9-4402-abb3-8c16e17afb69-dbus-socket\") pod \"nmstate-handler-c9l7g\" (UID: \"c005bc77-26a9-4402-abb3-8c16e17afb69\") " pod="openshift-nmstate/nmstate-handler-c9l7g"
Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.543570 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2lh4p\" (UniqueName: \"kubernetes.io/projected/081a21fa-325e-4018-9ce0-abc2bb1899ec-kube-api-access-2lh4p\") pod \"nmstate-webhook-5f6d4c5ccb-skw5g\" (UID: \"081a21fa-325e-4018-9ce0-abc2bb1899ec\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-skw5g"
Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.543608 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c8ssq\" (UniqueName: \"kubernetes.io/projected/8a3d6fb3-6f9e-40b5-8de6-30f0588df3db-kube-api-access-c8ssq\") pod \"nmstate-metrics-7f946cbc9-96mtg\" (UID: \"8a3d6fb3-6f9e-40b5-8de6-30f0588df3db\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-96mtg"
Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.543821 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/c005bc77-26a9-4402-abb3-8c16e17afb69-nmstate-lock\") pod \"nmstate-handler-c9l7g\" (UID: \"c005bc77-26a9-4402-abb3-8c16e17afb69\") " pod="openshift-nmstate/nmstate-handler-c9l7g"
Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.573783 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.573847 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.573901 4784 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm"
Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.574594 4784 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"137e007b26aac5135103d758a13b82ca82f6b2724608f280182653dcce8c9022"} pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.574670 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" containerID="cri-o://137e007b26aac5135103d758a13b82ca82f6b2724608f280182653dcce8c9022" gracePeriod=600
Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.580825 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-z58f9"]
Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.581573 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-z58f9"
Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.592999 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert"
Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.593228 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-x4292"
Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.593353 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf"
Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.595759 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-z58f9"]
Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.644718 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vcmp5\" (UniqueName: \"kubernetes.io/projected/c005bc77-26a9-4402-abb3-8c16e17afb69-kube-api-access-vcmp5\") pod \"nmstate-handler-c9l7g\" (UID: \"c005bc77-26a9-4402-abb3-8c16e17afb69\") " pod="openshift-nmstate/nmstate-handler-c9l7g"
Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.644793 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/c4e18387-6306-4f1c-8dd2-30cf9859dc6e-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-z58f9\" (UID: \"c4e18387-6306-4f1c-8dd2-30cf9859dc6e\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-z58f9"
Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.644838 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/c005bc77-26a9-4402-abb3-8c16e17afb69-dbus-socket\") pod \"nmstate-handler-c9l7g\" (UID: \"c005bc77-26a9-4402-abb3-8c16e17afb69\") " pod="openshift-nmstate/nmstate-handler-c9l7g"
\"nmstate-handler-c9l7g\" (UID: \"c005bc77-26a9-4402-abb3-8c16e17afb69\") " pod="openshift-nmstate/nmstate-handler-c9l7g" Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.644859 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2lh4p\" (UniqueName: \"kubernetes.io/projected/081a21fa-325e-4018-9ce0-abc2bb1899ec-kube-api-access-2lh4p\") pod \"nmstate-webhook-5f6d4c5ccb-skw5g\" (UID: \"081a21fa-325e-4018-9ce0-abc2bb1899ec\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-skw5g" Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.644885 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/c4e18387-6306-4f1c-8dd2-30cf9859dc6e-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-z58f9\" (UID: \"c4e18387-6306-4f1c-8dd2-30cf9859dc6e\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-z58f9" Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.644949 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j9nqg\" (UniqueName: \"kubernetes.io/projected/c4e18387-6306-4f1c-8dd2-30cf9859dc6e-kube-api-access-j9nqg\") pod \"nmstate-console-plugin-7fbb5f6569-z58f9\" (UID: \"c4e18387-6306-4f1c-8dd2-30cf9859dc6e\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-z58f9" Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.645172 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c8ssq\" (UniqueName: \"kubernetes.io/projected/8a3d6fb3-6f9e-40b5-8de6-30f0588df3db-kube-api-access-c8ssq\") pod \"nmstate-metrics-7f946cbc9-96mtg\" (UID: \"8a3d6fb3-6f9e-40b5-8de6-30f0588df3db\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-96mtg" Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.645273 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/c005bc77-26a9-4402-abb3-8c16e17afb69-dbus-socket\") pod \"nmstate-handler-c9l7g\" (UID: \"c005bc77-26a9-4402-abb3-8c16e17afb69\") " pod="openshift-nmstate/nmstate-handler-c9l7g" Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.645306 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/c005bc77-26a9-4402-abb3-8c16e17afb69-nmstate-lock\") pod \"nmstate-handler-c9l7g\" (UID: \"c005bc77-26a9-4402-abb3-8c16e17afb69\") " pod="openshift-nmstate/nmstate-handler-c9l7g" Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.645339 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/081a21fa-325e-4018-9ce0-abc2bb1899ec-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-skw5g\" (UID: \"081a21fa-325e-4018-9ce0-abc2bb1899ec\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-skw5g" Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.645358 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/c005bc77-26a9-4402-abb3-8c16e17afb69-ovs-socket\") pod \"nmstate-handler-c9l7g\" (UID: \"c005bc77-26a9-4402-abb3-8c16e17afb69\") " pod="openshift-nmstate/nmstate-handler-c9l7g" Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.645386 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: 
\"kubernetes.io/host-path/c005bc77-26a9-4402-abb3-8c16e17afb69-nmstate-lock\") pod \"nmstate-handler-c9l7g\" (UID: \"c005bc77-26a9-4402-abb3-8c16e17afb69\") " pod="openshift-nmstate/nmstate-handler-c9l7g" Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.645438 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/c005bc77-26a9-4402-abb3-8c16e17afb69-ovs-socket\") pod \"nmstate-handler-c9l7g\" (UID: \"c005bc77-26a9-4402-abb3-8c16e17afb69\") " pod="openshift-nmstate/nmstate-handler-c9l7g" Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.656127 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/081a21fa-325e-4018-9ce0-abc2bb1899ec-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-skw5g\" (UID: \"081a21fa-325e-4018-9ce0-abc2bb1899ec\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-skw5g" Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.660272 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vcmp5\" (UniqueName: \"kubernetes.io/projected/c005bc77-26a9-4402-abb3-8c16e17afb69-kube-api-access-vcmp5\") pod \"nmstate-handler-c9l7g\" (UID: \"c005bc77-26a9-4402-abb3-8c16e17afb69\") " pod="openshift-nmstate/nmstate-handler-c9l7g" Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.660599 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2lh4p\" (UniqueName: \"kubernetes.io/projected/081a21fa-325e-4018-9ce0-abc2bb1899ec-kube-api-access-2lh4p\") pod \"nmstate-webhook-5f6d4c5ccb-skw5g\" (UID: \"081a21fa-325e-4018-9ce0-abc2bb1899ec\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-skw5g" Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.665144 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c8ssq\" (UniqueName: \"kubernetes.io/projected/8a3d6fb3-6f9e-40b5-8de6-30f0588df3db-kube-api-access-c8ssq\") pod \"nmstate-metrics-7f946cbc9-96mtg\" (UID: \"8a3d6fb3-6f9e-40b5-8de6-30f0588df3db\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-96mtg" Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.746827 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j9nqg\" (UniqueName: \"kubernetes.io/projected/c4e18387-6306-4f1c-8dd2-30cf9859dc6e-kube-api-access-j9nqg\") pod \"nmstate-console-plugin-7fbb5f6569-z58f9\" (UID: \"c4e18387-6306-4f1c-8dd2-30cf9859dc6e\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-z58f9" Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.747019 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/c4e18387-6306-4f1c-8dd2-30cf9859dc6e-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-z58f9\" (UID: \"c4e18387-6306-4f1c-8dd2-30cf9859dc6e\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-z58f9" Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.747067 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/c4e18387-6306-4f1c-8dd2-30cf9859dc6e-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-z58f9\" (UID: \"c4e18387-6306-4f1c-8dd2-30cf9859dc6e\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-z58f9" Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.748020 4784 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/c4e18387-6306-4f1c-8dd2-30cf9859dc6e-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-z58f9\" (UID: \"c4e18387-6306-4f1c-8dd2-30cf9859dc6e\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-z58f9" Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.751360 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/c4e18387-6306-4f1c-8dd2-30cf9859dc6e-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-z58f9\" (UID: \"c4e18387-6306-4f1c-8dd2-30cf9859dc6e\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-z58f9" Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.765907 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j9nqg\" (UniqueName: \"kubernetes.io/projected/c4e18387-6306-4f1c-8dd2-30cf9859dc6e-kube-api-access-j9nqg\") pod \"nmstate-console-plugin-7fbb5f6569-z58f9\" (UID: \"c4e18387-6306-4f1c-8dd2-30cf9859dc6e\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-z58f9" Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.770071 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-96mtg" Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.783939 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-fd8bf89-fkrr8"] Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.784642 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-fd8bf89-fkrr8" Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.809125 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-fd8bf89-fkrr8"] Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.819007 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-skw5g" Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.835337 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-c9l7g" Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.848416 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/9b6f0190-ef58-4597-a37f-7fde499ff19e-console-config\") pod \"console-fd8bf89-fkrr8\" (UID: \"9b6f0190-ef58-4597-a37f-7fde499ff19e\") " pod="openshift-console/console-fd8bf89-fkrr8" Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.848499 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7q44z\" (UniqueName: \"kubernetes.io/projected/9b6f0190-ef58-4597-a37f-7fde499ff19e-kube-api-access-7q44z\") pod \"console-fd8bf89-fkrr8\" (UID: \"9b6f0190-ef58-4597-a37f-7fde499ff19e\") " pod="openshift-console/console-fd8bf89-fkrr8" Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.848536 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/9b6f0190-ef58-4597-a37f-7fde499ff19e-oauth-serving-cert\") pod \"console-fd8bf89-fkrr8\" (UID: \"9b6f0190-ef58-4597-a37f-7fde499ff19e\") " pod="openshift-console/console-fd8bf89-fkrr8" Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.848560 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/9b6f0190-ef58-4597-a37f-7fde499ff19e-console-serving-cert\") pod \"console-fd8bf89-fkrr8\" (UID: \"9b6f0190-ef58-4597-a37f-7fde499ff19e\") " pod="openshift-console/console-fd8bf89-fkrr8" Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.848582 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9b6f0190-ef58-4597-a37f-7fde499ff19e-trusted-ca-bundle\") pod \"console-fd8bf89-fkrr8\" (UID: \"9b6f0190-ef58-4597-a37f-7fde499ff19e\") " pod="openshift-console/console-fd8bf89-fkrr8" Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.848604 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9b6f0190-ef58-4597-a37f-7fde499ff19e-service-ca\") pod \"console-fd8bf89-fkrr8\" (UID: \"9b6f0190-ef58-4597-a37f-7fde499ff19e\") " pod="openshift-console/console-fd8bf89-fkrr8" Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.848635 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/9b6f0190-ef58-4597-a37f-7fde499ff19e-console-oauth-config\") pod \"console-fd8bf89-fkrr8\" (UID: \"9b6f0190-ef58-4597-a37f-7fde499ff19e\") " pod="openshift-console/console-fd8bf89-fkrr8" Dec 05 12:37:59 crc kubenswrapper[4784]: W1205 12:37:59.857854 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc005bc77_26a9_4402_abb3_8c16e17afb69.slice/crio-ebb69fe79de95bd24b2d3aa40fefd63b266951cc68d173a57d6f18c7e6ef0b83 WatchSource:0}: Error finding container ebb69fe79de95bd24b2d3aa40fefd63b266951cc68d173a57d6f18c7e6ef0b83: Status 404 returned error can't find the container with id ebb69fe79de95bd24b2d3aa40fefd63b266951cc68d173a57d6f18c7e6ef0b83 Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.898365 4784 util.go:30] "No sandbox 
Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.951535 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/9b6f0190-ef58-4597-a37f-7fde499ff19e-console-oauth-config\") pod \"console-fd8bf89-fkrr8\" (UID: \"9b6f0190-ef58-4597-a37f-7fde499ff19e\") " pod="openshift-console/console-fd8bf89-fkrr8"
Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.951671 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/9b6f0190-ef58-4597-a37f-7fde499ff19e-console-config\") pod \"console-fd8bf89-fkrr8\" (UID: \"9b6f0190-ef58-4597-a37f-7fde499ff19e\") " pod="openshift-console/console-fd8bf89-fkrr8"
Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.951719 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7q44z\" (UniqueName: \"kubernetes.io/projected/9b6f0190-ef58-4597-a37f-7fde499ff19e-kube-api-access-7q44z\") pod \"console-fd8bf89-fkrr8\" (UID: \"9b6f0190-ef58-4597-a37f-7fde499ff19e\") " pod="openshift-console/console-fd8bf89-fkrr8"
Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.952497 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/9b6f0190-ef58-4597-a37f-7fde499ff19e-oauth-serving-cert\") pod \"console-fd8bf89-fkrr8\" (UID: \"9b6f0190-ef58-4597-a37f-7fde499ff19e\") " pod="openshift-console/console-fd8bf89-fkrr8"
Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.952568 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/9b6f0190-ef58-4597-a37f-7fde499ff19e-console-serving-cert\") pod \"console-fd8bf89-fkrr8\" (UID: \"9b6f0190-ef58-4597-a37f-7fde499ff19e\") " pod="openshift-console/console-fd8bf89-fkrr8"
Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.952986 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/9b6f0190-ef58-4597-a37f-7fde499ff19e-console-config\") pod \"console-fd8bf89-fkrr8\" (UID: \"9b6f0190-ef58-4597-a37f-7fde499ff19e\") " pod="openshift-console/console-fd8bf89-fkrr8"
Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.953318 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/9b6f0190-ef58-4597-a37f-7fde499ff19e-oauth-serving-cert\") pod \"console-fd8bf89-fkrr8\" (UID: \"9b6f0190-ef58-4597-a37f-7fde499ff19e\") " pod="openshift-console/console-fd8bf89-fkrr8"
Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.953389 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9b6f0190-ef58-4597-a37f-7fde499ff19e-trusted-ca-bundle\") pod \"console-fd8bf89-fkrr8\" (UID: \"9b6f0190-ef58-4597-a37f-7fde499ff19e\") " pod="openshift-console/console-fd8bf89-fkrr8"
Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.953436 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9b6f0190-ef58-4597-a37f-7fde499ff19e-service-ca\") pod \"console-fd8bf89-fkrr8\" (UID: \"9b6f0190-ef58-4597-a37f-7fde499ff19e\") " pod="openshift-console/console-fd8bf89-fkrr8"
Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.954489 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9b6f0190-ef58-4597-a37f-7fde499ff19e-trusted-ca-bundle\") pod \"console-fd8bf89-fkrr8\" (UID: \"9b6f0190-ef58-4597-a37f-7fde499ff19e\") " pod="openshift-console/console-fd8bf89-fkrr8"
Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.957339 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/9b6f0190-ef58-4597-a37f-7fde499ff19e-console-oauth-config\") pod \"console-fd8bf89-fkrr8\" (UID: \"9b6f0190-ef58-4597-a37f-7fde499ff19e\") " pod="openshift-console/console-fd8bf89-fkrr8"
Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.958066 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/9b6f0190-ef58-4597-a37f-7fde499ff19e-console-serving-cert\") pod \"console-fd8bf89-fkrr8\" (UID: \"9b6f0190-ef58-4597-a37f-7fde499ff19e\") " pod="openshift-console/console-fd8bf89-fkrr8"
Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.959582 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9b6f0190-ef58-4597-a37f-7fde499ff19e-service-ca\") pod \"console-fd8bf89-fkrr8\" (UID: \"9b6f0190-ef58-4597-a37f-7fde499ff19e\") " pod="openshift-console/console-fd8bf89-fkrr8"
Dec 05 12:37:59 crc kubenswrapper[4784]: I1205 12:37:59.970589 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7q44z\" (UniqueName: \"kubernetes.io/projected/9b6f0190-ef58-4597-a37f-7fde499ff19e-kube-api-access-7q44z\") pod \"console-fd8bf89-fkrr8\" (UID: \"9b6f0190-ef58-4597-a37f-7fde499ff19e\") " pod="openshift-console/console-fd8bf89-fkrr8"
Dec 05 12:38:00 crc kubenswrapper[4784]: I1205 12:38:00.101857 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-fd8bf89-fkrr8"
Dec 05 12:38:00 crc kubenswrapper[4784]: I1205 12:38:00.135132 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-z58f9"]
Dec 05 12:38:00 crc kubenswrapper[4784]: W1205 12:38:00.146992 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc4e18387_6306_4f1c_8dd2_30cf9859dc6e.slice/crio-1e6168164b1b4d436e6d3a782a36ef0376cf41b61029043302eecd1d5c9943f0 WatchSource:0}: Error finding container 1e6168164b1b4d436e6d3a782a36ef0376cf41b61029043302eecd1d5c9943f0: Status 404 returned error can't find the container with id 1e6168164b1b4d436e6d3a782a36ef0376cf41b61029043302eecd1d5c9943f0
Dec 05 12:38:00 crc kubenswrapper[4784]: I1205 12:38:00.234413 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-96mtg"]
Dec 05 12:38:00 crc kubenswrapper[4784]: W1205 12:38:00.240913 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8a3d6fb3_6f9e_40b5_8de6_30f0588df3db.slice/crio-9377ceb2e91bf2060dceed85e927a398fd210e711bfc91d341ffc28478cd4cb5 WatchSource:0}: Error finding container 9377ceb2e91bf2060dceed85e927a398fd210e711bfc91d341ffc28478cd4cb5: Status 404 returned error can't find the container with id 9377ceb2e91bf2060dceed85e927a398fd210e711bfc91d341ffc28478cd4cb5
Dec 05 12:38:00 crc kubenswrapper[4784]: I1205 12:38:00.287337 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-skw5g"]
Dec 05 12:38:00 crc kubenswrapper[4784]: W1205 12:38:00.287985 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod081a21fa_325e_4018_9ce0_abc2bb1899ec.slice/crio-4b30655d3b793b259c2576912ff2841994bdfed94b9c9f657ec3e2feccd9328c WatchSource:0}: Error finding container 4b30655d3b793b259c2576912ff2841994bdfed94b9c9f657ec3e2feccd9328c: Status 404 returned error can't find the container with id 4b30655d3b793b259c2576912ff2841994bdfed94b9c9f657ec3e2feccd9328c
Dec 05 12:38:00 crc kubenswrapper[4784]: I1205 12:38:00.574868 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-fd8bf89-fkrr8"]
Dec 05 12:38:00 crc kubenswrapper[4784]: W1205 12:38:00.584536 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9b6f0190_ef58_4597_a37f_7fde499ff19e.slice/crio-74f58f959f44623b12c69ab996e4d85080439586d8889cfbb279f7dd70133d1d WatchSource:0}: Error finding container 74f58f959f44623b12c69ab996e4d85080439586d8889cfbb279f7dd70133d1d: Status 404 returned error can't find the container with id 74f58f959f44623b12c69ab996e4d85080439586d8889cfbb279f7dd70133d1d
Dec 05 12:38:00 crc kubenswrapper[4784]: I1205 12:38:00.859308 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-fd8bf89-fkrr8" event={"ID":"9b6f0190-ef58-4597-a37f-7fde499ff19e","Type":"ContainerStarted","Data":"74f58f959f44623b12c69ab996e4d85080439586d8889cfbb279f7dd70133d1d"}
Dec 05 12:38:00 crc kubenswrapper[4784]: I1205 12:38:00.861458 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-c9l7g" event={"ID":"c005bc77-26a9-4402-abb3-8c16e17afb69","Type":"ContainerStarted","Data":"ebb69fe79de95bd24b2d3aa40fefd63b266951cc68d173a57d6f18c7e6ef0b83"}
event={"ID":"c4e18387-6306-4f1c-8dd2-30cf9859dc6e","Type":"ContainerStarted","Data":"1fdfef7d9998553bf526424f70888efae5dac06f2d148b800b45f707d13f0777"} Dec 05 12:38:04 crc kubenswrapper[4784]: I1205 12:38:04.915622 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-skw5g" event={"ID":"081a21fa-325e-4018-9ce0-abc2bb1899ec","Type":"ContainerStarted","Data":"54c51659d2e1691c1bf0137215ef89d8c14b1467e99aea5c64bb8955d5bc1aaa"} Dec 05 12:38:04 crc kubenswrapper[4784]: I1205 12:38:04.916024 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-skw5g" Dec 05 12:38:04 crc kubenswrapper[4784]: I1205 12:38:04.918025 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-c9l7g" event={"ID":"c005bc77-26a9-4402-abb3-8c16e17afb69","Type":"ContainerStarted","Data":"c08908a727c7055512a5867c2a6a5eb907e55768bf299a095f01a702b58e8fa2"} Dec 05 12:38:04 crc kubenswrapper[4784]: I1205 12:38:04.918249 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-c9l7g" Dec 05 12:38:04 crc kubenswrapper[4784]: I1205 12:38:04.919609 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-96mtg" event={"ID":"8a3d6fb3-6f9e-40b5-8de6-30f0588df3db","Type":"ContainerStarted","Data":"fc9c855ac9c20da6739414fe771fd634add1c755f882c0c5977bb63e5ff45fe6"} Dec 05 12:38:04 crc kubenswrapper[4784]: I1205 12:38:04.934890 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-z58f9" podStartSLOduration=1.95654458 podStartE2EDuration="5.934867512s" podCreationTimestamp="2025-12-05 12:37:59 +0000 UTC" firstStartedPulling="2025-12-05 12:38:00.14984824 +0000 UTC m=+759.569915055" lastFinishedPulling="2025-12-05 12:38:04.128171172 +0000 UTC m=+763.548237987" observedRunningTime="2025-12-05 12:38:04.932985952 +0000 UTC m=+764.353052767" watchObservedRunningTime="2025-12-05 12:38:04.934867512 +0000 UTC m=+764.354934327" Dec 05 12:38:05 crc kubenswrapper[4784]: I1205 12:38:05.016120 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-c9l7g" podStartSLOduration=1.7427801490000001 podStartE2EDuration="6.016094949s" podCreationTimestamp="2025-12-05 12:37:59 +0000 UTC" firstStartedPulling="2025-12-05 12:37:59.860587386 +0000 UTC m=+759.280654201" lastFinishedPulling="2025-12-05 12:38:04.133902186 +0000 UTC m=+763.553969001" observedRunningTime="2025-12-05 12:38:05.01520903 +0000 UTC m=+764.435275845" watchObservedRunningTime="2025-12-05 12:38:05.016094949 +0000 UTC m=+764.436161764" Dec 05 12:38:05 crc kubenswrapper[4784]: I1205 12:38:05.018353 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-skw5g" podStartSLOduration=2.158720279 podStartE2EDuration="6.018345271s" podCreationTimestamp="2025-12-05 12:37:59 +0000 UTC" firstStartedPulling="2025-12-05 12:38:00.290488094 +0000 UTC m=+759.710554909" lastFinishedPulling="2025-12-05 12:38:04.150113076 +0000 UTC m=+763.570179901" observedRunningTime="2025-12-05 12:38:04.992722209 +0000 UTC m=+764.412789024" watchObservedRunningTime="2025-12-05 12:38:05.018345271 +0000 UTC m=+764.438412086" Dec 05 12:38:09 crc kubenswrapper[4784]: I1205 12:38:09.308536 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-nmstate/nmstate-metrics-7f946cbc9-96mtg" event={"ID":"8a3d6fb3-6f9e-40b5-8de6-30f0588df3db","Type":"ContainerStarted","Data":"493b5a597421f9bed353481b3ec4077d3345d2ab4a81a627cf7ab243461b6c8c"} Dec 05 12:38:09 crc kubenswrapper[4784]: I1205 12:38:09.334291 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-96mtg" podStartSLOduration=1.917380963 podStartE2EDuration="10.334255957s" podCreationTimestamp="2025-12-05 12:37:59 +0000 UTC" firstStartedPulling="2025-12-05 12:38:00.244487137 +0000 UTC m=+759.664553962" lastFinishedPulling="2025-12-05 12:38:08.661362131 +0000 UTC m=+768.081428956" observedRunningTime="2025-12-05 12:38:09.330347402 +0000 UTC m=+768.750414227" watchObservedRunningTime="2025-12-05 12:38:09.334255957 +0000 UTC m=+768.754322812" Dec 05 12:38:09 crc kubenswrapper[4784]: I1205 12:38:09.865851 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-c9l7g" Dec 05 12:38:10 crc kubenswrapper[4784]: I1205 12:38:10.103527 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-fd8bf89-fkrr8" Dec 05 12:38:10 crc kubenswrapper[4784]: I1205 12:38:10.103829 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-fd8bf89-fkrr8" Dec 05 12:38:10 crc kubenswrapper[4784]: I1205 12:38:10.110575 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-fd8bf89-fkrr8" Dec 05 12:38:10 crc kubenswrapper[4784]: I1205 12:38:10.322716 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-fd8bf89-fkrr8" Dec 05 12:38:10 crc kubenswrapper[4784]: I1205 12:38:10.386683 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-m7m44"] Dec 05 12:38:19 crc kubenswrapper[4784]: I1205 12:38:19.825961 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-skw5g" Dec 05 12:38:34 crc kubenswrapper[4784]: I1205 12:38:34.915838 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83mz79j"] Dec 05 12:38:34 crc kubenswrapper[4784]: I1205 12:38:34.917799 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83mz79j" Dec 05 12:38:34 crc kubenswrapper[4784]: I1205 12:38:34.919649 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 05 12:38:34 crc kubenswrapper[4784]: I1205 12:38:34.929738 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83mz79j"] Dec 05 12:38:34 crc kubenswrapper[4784]: I1205 12:38:34.991449 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a3027214-9fa3-4dd0-93f5-b3b316247c73-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83mz79j\" (UID: \"a3027214-9fa3-4dd0-93f5-b3b316247c73\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83mz79j" Dec 05 12:38:34 crc kubenswrapper[4784]: I1205 12:38:34.991837 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a3027214-9fa3-4dd0-93f5-b3b316247c73-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83mz79j\" (UID: \"a3027214-9fa3-4dd0-93f5-b3b316247c73\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83mz79j" Dec 05 12:38:34 crc kubenswrapper[4784]: I1205 12:38:34.991914 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7s7r\" (UniqueName: \"kubernetes.io/projected/a3027214-9fa3-4dd0-93f5-b3b316247c73-kube-api-access-w7s7r\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83mz79j\" (UID: \"a3027214-9fa3-4dd0-93f5-b3b316247c73\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83mz79j" Dec 05 12:38:35 crc kubenswrapper[4784]: I1205 12:38:35.092298 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7s7r\" (UniqueName: \"kubernetes.io/projected/a3027214-9fa3-4dd0-93f5-b3b316247c73-kube-api-access-w7s7r\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83mz79j\" (UID: \"a3027214-9fa3-4dd0-93f5-b3b316247c73\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83mz79j" Dec 05 12:38:35 crc kubenswrapper[4784]: I1205 12:38:35.092430 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a3027214-9fa3-4dd0-93f5-b3b316247c73-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83mz79j\" (UID: \"a3027214-9fa3-4dd0-93f5-b3b316247c73\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83mz79j" Dec 05 12:38:35 crc kubenswrapper[4784]: I1205 12:38:35.092459 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a3027214-9fa3-4dd0-93f5-b3b316247c73-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83mz79j\" (UID: \"a3027214-9fa3-4dd0-93f5-b3b316247c73\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83mz79j" Dec 05 12:38:35 crc kubenswrapper[4784]: I1205 12:38:35.092834 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/a3027214-9fa3-4dd0-93f5-b3b316247c73-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83mz79j\" (UID: \"a3027214-9fa3-4dd0-93f5-b3b316247c73\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83mz79j" Dec 05 12:38:35 crc kubenswrapper[4784]: I1205 12:38:35.093148 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a3027214-9fa3-4dd0-93f5-b3b316247c73-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83mz79j\" (UID: \"a3027214-9fa3-4dd0-93f5-b3b316247c73\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83mz79j" Dec 05 12:38:35 crc kubenswrapper[4784]: I1205 12:38:35.113201 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7s7r\" (UniqueName: \"kubernetes.io/projected/a3027214-9fa3-4dd0-93f5-b3b316247c73-kube-api-access-w7s7r\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83mz79j\" (UID: \"a3027214-9fa3-4dd0-93f5-b3b316247c73\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83mz79j" Dec 05 12:38:35 crc kubenswrapper[4784]: I1205 12:38:35.235061 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83mz79j" Dec 05 12:38:35 crc kubenswrapper[4784]: I1205 12:38:35.439929 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-m7m44" podUID="663c4c9a-9738-4c49-9199-d2a18cd6d4be" containerName="console" containerID="cri-o://9bf026b5dd8a32187f00e0a1e059a59224ba691628d7bb95f8278cb3a9b06b0b" gracePeriod=15 Dec 05 12:38:35 crc kubenswrapper[4784]: I1205 12:38:35.519808 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83mz79j"] Dec 05 12:38:35 crc kubenswrapper[4784]: W1205 12:38:35.552300 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda3027214_9fa3_4dd0_93f5_b3b316247c73.slice/crio-65d223879b42ba81171546bc710f04e419448fecb3bcaf752e8ea37af5babaa9 WatchSource:0}: Error finding container 65d223879b42ba81171546bc710f04e419448fecb3bcaf752e8ea37af5babaa9: Status 404 returned error can't find the container with id 65d223879b42ba81171546bc710f04e419448fecb3bcaf752e8ea37af5babaa9 Dec 05 12:38:35 crc kubenswrapper[4784]: I1205 12:38:35.843716 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-m7m44_663c4c9a-9738-4c49-9199-d2a18cd6d4be/console/0.log" Dec 05 12:38:35 crc kubenswrapper[4784]: I1205 12:38:35.843780 4784 util.go:48] "No ready sandbox for pod can be found. 
Dec 05 12:38:36 crc kubenswrapper[4784]: I1205 12:38:36.003853 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/663c4c9a-9738-4c49-9199-d2a18cd6d4be-console-oauth-config\") pod \"663c4c9a-9738-4c49-9199-d2a18cd6d4be\" (UID: \"663c4c9a-9738-4c49-9199-d2a18cd6d4be\") "
Dec 05 12:38:36 crc kubenswrapper[4784]: I1205 12:38:36.004879 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/663c4c9a-9738-4c49-9199-d2a18cd6d4be-oauth-serving-cert\") pod \"663c4c9a-9738-4c49-9199-d2a18cd6d4be\" (UID: \"663c4c9a-9738-4c49-9199-d2a18cd6d4be\") "
Dec 05 12:38:36 crc kubenswrapper[4784]: I1205 12:38:36.004961 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/663c4c9a-9738-4c49-9199-d2a18cd6d4be-console-config\") pod \"663c4c9a-9738-4c49-9199-d2a18cd6d4be\" (UID: \"663c4c9a-9738-4c49-9199-d2a18cd6d4be\") "
Dec 05 12:38:36 crc kubenswrapper[4784]: I1205 12:38:36.005026 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/663c4c9a-9738-4c49-9199-d2a18cd6d4be-console-serving-cert\") pod \"663c4c9a-9738-4c49-9199-d2a18cd6d4be\" (UID: \"663c4c9a-9738-4c49-9199-d2a18cd6d4be\") "
Dec 05 12:38:36 crc kubenswrapper[4784]: I1205 12:38:36.005075 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/663c4c9a-9738-4c49-9199-d2a18cd6d4be-service-ca\") pod \"663c4c9a-9738-4c49-9199-d2a18cd6d4be\" (UID: \"663c4c9a-9738-4c49-9199-d2a18cd6d4be\") "
Dec 05 12:38:36 crc kubenswrapper[4784]: I1205 12:38:36.005132 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/663c4c9a-9738-4c49-9199-d2a18cd6d4be-trusted-ca-bundle\") pod \"663c4c9a-9738-4c49-9199-d2a18cd6d4be\" (UID: \"663c4c9a-9738-4c49-9199-d2a18cd6d4be\") "
Dec 05 12:38:36 crc kubenswrapper[4784]: I1205 12:38:36.005234 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbw6t\" (UniqueName: \"kubernetes.io/projected/663c4c9a-9738-4c49-9199-d2a18cd6d4be-kube-api-access-dbw6t\") pod \"663c4c9a-9738-4c49-9199-d2a18cd6d4be\" (UID: \"663c4c9a-9738-4c49-9199-d2a18cd6d4be\") "
Dec 05 12:38:36 crc kubenswrapper[4784]: I1205 12:38:36.005900 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/663c4c9a-9738-4c49-9199-d2a18cd6d4be-console-config" (OuterVolumeSpecName: "console-config") pod "663c4c9a-9738-4c49-9199-d2a18cd6d4be" (UID: "663c4c9a-9738-4c49-9199-d2a18cd6d4be"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:38:36 crc kubenswrapper[4784]: I1205 12:38:36.005955 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/663c4c9a-9738-4c49-9199-d2a18cd6d4be-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "663c4c9a-9738-4c49-9199-d2a18cd6d4be" (UID: "663c4c9a-9738-4c49-9199-d2a18cd6d4be"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:38:36 crc kubenswrapper[4784]: I1205 12:38:36.005993 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/663c4c9a-9738-4c49-9199-d2a18cd6d4be-service-ca" (OuterVolumeSpecName: "service-ca") pod "663c4c9a-9738-4c49-9199-d2a18cd6d4be" (UID: "663c4c9a-9738-4c49-9199-d2a18cd6d4be"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:38:36 crc kubenswrapper[4784]: I1205 12:38:36.007513 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/663c4c9a-9738-4c49-9199-d2a18cd6d4be-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "663c4c9a-9738-4c49-9199-d2a18cd6d4be" (UID: "663c4c9a-9738-4c49-9199-d2a18cd6d4be"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:38:36 crc kubenswrapper[4784]: I1205 12:38:36.013544 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/663c4c9a-9738-4c49-9199-d2a18cd6d4be-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "663c4c9a-9738-4c49-9199-d2a18cd6d4be" (UID: "663c4c9a-9738-4c49-9199-d2a18cd6d4be"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:38:36 crc kubenswrapper[4784]: I1205 12:38:36.013947 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/663c4c9a-9738-4c49-9199-d2a18cd6d4be-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "663c4c9a-9738-4c49-9199-d2a18cd6d4be" (UID: "663c4c9a-9738-4c49-9199-d2a18cd6d4be"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:38:36 crc kubenswrapper[4784]: I1205 12:38:36.014130 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/663c4c9a-9738-4c49-9199-d2a18cd6d4be-kube-api-access-dbw6t" (OuterVolumeSpecName: "kube-api-access-dbw6t") pod "663c4c9a-9738-4c49-9199-d2a18cd6d4be" (UID: "663c4c9a-9738-4c49-9199-d2a18cd6d4be"). InnerVolumeSpecName "kube-api-access-dbw6t". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:38:36 crc kubenswrapper[4784]: I1205 12:38:36.106852 4784 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/663c4c9a-9738-4c49-9199-d2a18cd6d4be-console-config\") on node \"crc\" DevicePath \"\""
Dec 05 12:38:36 crc kubenswrapper[4784]: I1205 12:38:36.106927 4784 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/663c4c9a-9738-4c49-9199-d2a18cd6d4be-console-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 05 12:38:36 crc kubenswrapper[4784]: I1205 12:38:36.106949 4784 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/663c4c9a-9738-4c49-9199-d2a18cd6d4be-service-ca\") on node \"crc\" DevicePath \"\""
Dec 05 12:38:36 crc kubenswrapper[4784]: I1205 12:38:36.106968 4784 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/663c4c9a-9738-4c49-9199-d2a18cd6d4be-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 12:38:36 crc kubenswrapper[4784]: I1205 12:38:36.106987 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbw6t\" (UniqueName: \"kubernetes.io/projected/663c4c9a-9738-4c49-9199-d2a18cd6d4be-kube-api-access-dbw6t\") on node \"crc\" DevicePath \"\""
Dec 05 12:38:36 crc kubenswrapper[4784]: I1205 12:38:36.107040 4784 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/663c4c9a-9738-4c49-9199-d2a18cd6d4be-console-oauth-config\") on node \"crc\" DevicePath \"\""
Dec 05 12:38:36 crc kubenswrapper[4784]: I1205 12:38:36.107058 4784 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/663c4c9a-9738-4c49-9199-d2a18cd6d4be-oauth-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 05 12:38:36 crc kubenswrapper[4784]: I1205 12:38:36.485557 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-m7m44_663c4c9a-9738-4c49-9199-d2a18cd6d4be/console/0.log"
Dec 05 12:38:36 crc kubenswrapper[4784]: I1205 12:38:36.485622 4784 generic.go:334] "Generic (PLEG): container finished" podID="663c4c9a-9738-4c49-9199-d2a18cd6d4be" containerID="9bf026b5dd8a32187f00e0a1e059a59224ba691628d7bb95f8278cb3a9b06b0b" exitCode=2
Dec 05 12:38:36 crc kubenswrapper[4784]: I1205 12:38:36.485699 4784 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openshift-console/console-f9d7485db-m7m44" Dec 05 12:38:36 crc kubenswrapper[4784]: I1205 12:38:36.485747 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-m7m44" event={"ID":"663c4c9a-9738-4c49-9199-d2a18cd6d4be","Type":"ContainerDied","Data":"9bf026b5dd8a32187f00e0a1e059a59224ba691628d7bb95f8278cb3a9b06b0b"} Dec 05 12:38:36 crc kubenswrapper[4784]: I1205 12:38:36.485786 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-m7m44" event={"ID":"663c4c9a-9738-4c49-9199-d2a18cd6d4be","Type":"ContainerDied","Data":"fee62a5a67b8b559bfaae53985558252d9a01b8b38558502b7e0d5c26b6e5557"} Dec 05 12:38:36 crc kubenswrapper[4784]: I1205 12:38:36.485802 4784 scope.go:117] "RemoveContainer" containerID="9bf026b5dd8a32187f00e0a1e059a59224ba691628d7bb95f8278cb3a9b06b0b" Dec 05 12:38:36 crc kubenswrapper[4784]: I1205 12:38:36.487706 4784 generic.go:334] "Generic (PLEG): container finished" podID="a3027214-9fa3-4dd0-93f5-b3b316247c73" containerID="69a0b1a05d4cbdb47ed32f12f36d8656dec3e22ee3bff68fabef355fb556f21f" exitCode=0 Dec 05 12:38:36 crc kubenswrapper[4784]: I1205 12:38:36.487745 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83mz79j" event={"ID":"a3027214-9fa3-4dd0-93f5-b3b316247c73","Type":"ContainerDied","Data":"69a0b1a05d4cbdb47ed32f12f36d8656dec3e22ee3bff68fabef355fb556f21f"} Dec 05 12:38:36 crc kubenswrapper[4784]: I1205 12:38:36.487762 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83mz79j" event={"ID":"a3027214-9fa3-4dd0-93f5-b3b316247c73","Type":"ContainerStarted","Data":"65d223879b42ba81171546bc710f04e419448fecb3bcaf752e8ea37af5babaa9"} Dec 05 12:38:36 crc kubenswrapper[4784]: I1205 12:38:36.526685 4784 scope.go:117] "RemoveContainer" containerID="9bf026b5dd8a32187f00e0a1e059a59224ba691628d7bb95f8278cb3a9b06b0b" Dec 05 12:38:36 crc kubenswrapper[4784]: E1205 12:38:36.527221 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9bf026b5dd8a32187f00e0a1e059a59224ba691628d7bb95f8278cb3a9b06b0b\": container with ID starting with 9bf026b5dd8a32187f00e0a1e059a59224ba691628d7bb95f8278cb3a9b06b0b not found: ID does not exist" containerID="9bf026b5dd8a32187f00e0a1e059a59224ba691628d7bb95f8278cb3a9b06b0b" Dec 05 12:38:36 crc kubenswrapper[4784]: I1205 12:38:36.527279 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9bf026b5dd8a32187f00e0a1e059a59224ba691628d7bb95f8278cb3a9b06b0b"} err="failed to get container status \"9bf026b5dd8a32187f00e0a1e059a59224ba691628d7bb95f8278cb3a9b06b0b\": rpc error: code = NotFound desc = could not find container \"9bf026b5dd8a32187f00e0a1e059a59224ba691628d7bb95f8278cb3a9b06b0b\": container with ID starting with 9bf026b5dd8a32187f00e0a1e059a59224ba691628d7bb95f8278cb3a9b06b0b not found: ID does not exist" Dec 05 12:38:36 crc kubenswrapper[4784]: I1205 12:38:36.548839 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-m7m44"] Dec 05 12:38:36 crc kubenswrapper[4784]: I1205 12:38:36.554329 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-m7m44"] Dec 05 12:38:37 crc kubenswrapper[4784]: I1205 12:38:37.006478 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes 
dir" podUID="663c4c9a-9738-4c49-9199-d2a18cd6d4be" path="/var/lib/kubelet/pods/663c4c9a-9738-4c49-9199-d2a18cd6d4be/volumes" Dec 05 12:38:38 crc kubenswrapper[4784]: I1205 12:38:38.459397 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-xrjl9"] Dec 05 12:38:38 crc kubenswrapper[4784]: E1205 12:38:38.460428 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="663c4c9a-9738-4c49-9199-d2a18cd6d4be" containerName="console" Dec 05 12:38:38 crc kubenswrapper[4784]: I1205 12:38:38.460446 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="663c4c9a-9738-4c49-9199-d2a18cd6d4be" containerName="console" Dec 05 12:38:38 crc kubenswrapper[4784]: I1205 12:38:38.460584 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="663c4c9a-9738-4c49-9199-d2a18cd6d4be" containerName="console" Dec 05 12:38:38 crc kubenswrapper[4784]: I1205 12:38:38.461766 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xrjl9" Dec 05 12:38:38 crc kubenswrapper[4784]: I1205 12:38:38.472765 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xrjl9"] Dec 05 12:38:38 crc kubenswrapper[4784]: I1205 12:38:38.499958 4784 generic.go:334] "Generic (PLEG): container finished" podID="a3027214-9fa3-4dd0-93f5-b3b316247c73" containerID="803f5ff71c77f3ed2bebb26d73c308d57ec2e4904fa9642d99945831aeaeb5bd" exitCode=0 Dec 05 12:38:38 crc kubenswrapper[4784]: I1205 12:38:38.500005 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83mz79j" event={"ID":"a3027214-9fa3-4dd0-93f5-b3b316247c73","Type":"ContainerDied","Data":"803f5ff71c77f3ed2bebb26d73c308d57ec2e4904fa9642d99945831aeaeb5bd"} Dec 05 12:38:38 crc kubenswrapper[4784]: I1205 12:38:38.541341 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ml7pm\" (UniqueName: \"kubernetes.io/projected/cad8d357-acd6-4168-a751-39bafe879ca8-kube-api-access-ml7pm\") pod \"redhat-operators-xrjl9\" (UID: \"cad8d357-acd6-4168-a751-39bafe879ca8\") " pod="openshift-marketplace/redhat-operators-xrjl9" Dec 05 12:38:38 crc kubenswrapper[4784]: I1205 12:38:38.541399 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cad8d357-acd6-4168-a751-39bafe879ca8-catalog-content\") pod \"redhat-operators-xrjl9\" (UID: \"cad8d357-acd6-4168-a751-39bafe879ca8\") " pod="openshift-marketplace/redhat-operators-xrjl9" Dec 05 12:38:38 crc kubenswrapper[4784]: I1205 12:38:38.541480 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cad8d357-acd6-4168-a751-39bafe879ca8-utilities\") pod \"redhat-operators-xrjl9\" (UID: \"cad8d357-acd6-4168-a751-39bafe879ca8\") " pod="openshift-marketplace/redhat-operators-xrjl9" Dec 05 12:38:38 crc kubenswrapper[4784]: I1205 12:38:38.643201 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cad8d357-acd6-4168-a751-39bafe879ca8-utilities\") pod \"redhat-operators-xrjl9\" (UID: \"cad8d357-acd6-4168-a751-39bafe879ca8\") " pod="openshift-marketplace/redhat-operators-xrjl9" Dec 05 12:38:38 crc kubenswrapper[4784]: I1205 12:38:38.643280 4784 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-ml7pm\" (UniqueName: \"kubernetes.io/projected/cad8d357-acd6-4168-a751-39bafe879ca8-kube-api-access-ml7pm\") pod \"redhat-operators-xrjl9\" (UID: \"cad8d357-acd6-4168-a751-39bafe879ca8\") " pod="openshift-marketplace/redhat-operators-xrjl9" Dec 05 12:38:38 crc kubenswrapper[4784]: I1205 12:38:38.643308 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cad8d357-acd6-4168-a751-39bafe879ca8-catalog-content\") pod \"redhat-operators-xrjl9\" (UID: \"cad8d357-acd6-4168-a751-39bafe879ca8\") " pod="openshift-marketplace/redhat-operators-xrjl9" Dec 05 12:38:38 crc kubenswrapper[4784]: I1205 12:38:38.643858 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cad8d357-acd6-4168-a751-39bafe879ca8-utilities\") pod \"redhat-operators-xrjl9\" (UID: \"cad8d357-acd6-4168-a751-39bafe879ca8\") " pod="openshift-marketplace/redhat-operators-xrjl9" Dec 05 12:38:38 crc kubenswrapper[4784]: I1205 12:38:38.643884 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cad8d357-acd6-4168-a751-39bafe879ca8-catalog-content\") pod \"redhat-operators-xrjl9\" (UID: \"cad8d357-acd6-4168-a751-39bafe879ca8\") " pod="openshift-marketplace/redhat-operators-xrjl9" Dec 05 12:38:38 crc kubenswrapper[4784]: I1205 12:38:38.669024 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ml7pm\" (UniqueName: \"kubernetes.io/projected/cad8d357-acd6-4168-a751-39bafe879ca8-kube-api-access-ml7pm\") pod \"redhat-operators-xrjl9\" (UID: \"cad8d357-acd6-4168-a751-39bafe879ca8\") " pod="openshift-marketplace/redhat-operators-xrjl9" Dec 05 12:38:38 crc kubenswrapper[4784]: I1205 12:38:38.776573 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-xrjl9" Dec 05 12:38:39 crc kubenswrapper[4784]: I1205 12:38:39.074248 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xrjl9"] Dec 05 12:38:39 crc kubenswrapper[4784]: I1205 12:38:39.508065 4784 generic.go:334] "Generic (PLEG): container finished" podID="a3027214-9fa3-4dd0-93f5-b3b316247c73" containerID="8d57faea9ba004fa43b4cd68b757be9dc898161f00768362e4bd256a932cb476" exitCode=0 Dec 05 12:38:39 crc kubenswrapper[4784]: I1205 12:38:39.508270 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83mz79j" event={"ID":"a3027214-9fa3-4dd0-93f5-b3b316247c73","Type":"ContainerDied","Data":"8d57faea9ba004fa43b4cd68b757be9dc898161f00768362e4bd256a932cb476"} Dec 05 12:38:39 crc kubenswrapper[4784]: I1205 12:38:39.512132 4784 generic.go:334] "Generic (PLEG): container finished" podID="cad8d357-acd6-4168-a751-39bafe879ca8" containerID="1711b0c648e32ec554631855aca0e9515a65eacd27c39351550226b801d1c81f" exitCode=0 Dec 05 12:38:39 crc kubenswrapper[4784]: I1205 12:38:39.512171 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xrjl9" event={"ID":"cad8d357-acd6-4168-a751-39bafe879ca8","Type":"ContainerDied","Data":"1711b0c648e32ec554631855aca0e9515a65eacd27c39351550226b801d1c81f"} Dec 05 12:38:39 crc kubenswrapper[4784]: I1205 12:38:39.512233 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xrjl9" event={"ID":"cad8d357-acd6-4168-a751-39bafe879ca8","Type":"ContainerStarted","Data":"9dc157edb93ec6127aa9113c2ae47baf03508930c09099b01c664192c1527a18"} Dec 05 12:38:40 crc kubenswrapper[4784]: I1205 12:38:40.520485 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xrjl9" event={"ID":"cad8d357-acd6-4168-a751-39bafe879ca8","Type":"ContainerStarted","Data":"d87abb781aed73406968b0c17833544d29ce754c6362d0b90c2db595f4eefd7c"} Dec 05 12:38:40 crc kubenswrapper[4784]: I1205 12:38:40.790426 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83mz79j" Dec 05 12:38:40 crc kubenswrapper[4784]: I1205 12:38:40.978249 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7s7r\" (UniqueName: \"kubernetes.io/projected/a3027214-9fa3-4dd0-93f5-b3b316247c73-kube-api-access-w7s7r\") pod \"a3027214-9fa3-4dd0-93f5-b3b316247c73\" (UID: \"a3027214-9fa3-4dd0-93f5-b3b316247c73\") " Dec 05 12:38:40 crc kubenswrapper[4784]: I1205 12:38:40.978516 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a3027214-9fa3-4dd0-93f5-b3b316247c73-util\") pod \"a3027214-9fa3-4dd0-93f5-b3b316247c73\" (UID: \"a3027214-9fa3-4dd0-93f5-b3b316247c73\") " Dec 05 12:38:40 crc kubenswrapper[4784]: I1205 12:38:40.978584 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a3027214-9fa3-4dd0-93f5-b3b316247c73-bundle\") pod \"a3027214-9fa3-4dd0-93f5-b3b316247c73\" (UID: \"a3027214-9fa3-4dd0-93f5-b3b316247c73\") " Dec 05 12:38:40 crc kubenswrapper[4784]: I1205 12:38:40.979784 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a3027214-9fa3-4dd0-93f5-b3b316247c73-bundle" (OuterVolumeSpecName: "bundle") pod "a3027214-9fa3-4dd0-93f5-b3b316247c73" (UID: "a3027214-9fa3-4dd0-93f5-b3b316247c73"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:38:40 crc kubenswrapper[4784]: I1205 12:38:40.985199 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a3027214-9fa3-4dd0-93f5-b3b316247c73-kube-api-access-w7s7r" (OuterVolumeSpecName: "kube-api-access-w7s7r") pod "a3027214-9fa3-4dd0-93f5-b3b316247c73" (UID: "a3027214-9fa3-4dd0-93f5-b3b316247c73"). InnerVolumeSpecName "kube-api-access-w7s7r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:38:41 crc kubenswrapper[4784]: I1205 12:38:41.007611 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a3027214-9fa3-4dd0-93f5-b3b316247c73-util" (OuterVolumeSpecName: "util") pod "a3027214-9fa3-4dd0-93f5-b3b316247c73" (UID: "a3027214-9fa3-4dd0-93f5-b3b316247c73"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:38:41 crc kubenswrapper[4784]: I1205 12:38:41.079989 4784 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a3027214-9fa3-4dd0-93f5-b3b316247c73-util\") on node \"crc\" DevicePath \"\"" Dec 05 12:38:41 crc kubenswrapper[4784]: I1205 12:38:41.080051 4784 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a3027214-9fa3-4dd0-93f5-b3b316247c73-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:38:41 crc kubenswrapper[4784]: I1205 12:38:41.080068 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7s7r\" (UniqueName: \"kubernetes.io/projected/a3027214-9fa3-4dd0-93f5-b3b316247c73-kube-api-access-w7s7r\") on node \"crc\" DevicePath \"\"" Dec 05 12:38:41 crc kubenswrapper[4784]: I1205 12:38:41.530389 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83mz79j" event={"ID":"a3027214-9fa3-4dd0-93f5-b3b316247c73","Type":"ContainerDied","Data":"65d223879b42ba81171546bc710f04e419448fecb3bcaf752e8ea37af5babaa9"} Dec 05 12:38:41 crc kubenswrapper[4784]: I1205 12:38:41.530436 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="65d223879b42ba81171546bc710f04e419448fecb3bcaf752e8ea37af5babaa9" Dec 05 12:38:41 crc kubenswrapper[4784]: I1205 12:38:41.530434 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83mz79j" Dec 05 12:38:41 crc kubenswrapper[4784]: I1205 12:38:41.533551 4784 generic.go:334] "Generic (PLEG): container finished" podID="cad8d357-acd6-4168-a751-39bafe879ca8" containerID="d87abb781aed73406968b0c17833544d29ce754c6362d0b90c2db595f4eefd7c" exitCode=0 Dec 05 12:38:41 crc kubenswrapper[4784]: I1205 12:38:41.533589 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xrjl9" event={"ID":"cad8d357-acd6-4168-a751-39bafe879ca8","Type":"ContainerDied","Data":"d87abb781aed73406968b0c17833544d29ce754c6362d0b90c2db595f4eefd7c"} Dec 05 12:38:43 crc kubenswrapper[4784]: I1205 12:38:43.550169 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xrjl9" event={"ID":"cad8d357-acd6-4168-a751-39bafe879ca8","Type":"ContainerStarted","Data":"0bc521b467ac33fba46cf94389ffa3094796b2901a67881921daf28a4a8cec66"} Dec 05 12:38:43 crc kubenswrapper[4784]: I1205 12:38:43.573404 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-xrjl9" podStartSLOduration=2.615768766 podStartE2EDuration="5.573381962s" podCreationTimestamp="2025-12-05 12:38:38 +0000 UTC" firstStartedPulling="2025-12-05 12:38:39.513765853 +0000 UTC m=+798.933832668" lastFinishedPulling="2025-12-05 12:38:42.471378999 +0000 UTC m=+801.891445864" observedRunningTime="2025-12-05 12:38:43.568594032 +0000 UTC m=+802.988660847" watchObservedRunningTime="2025-12-05 12:38:43.573381962 +0000 UTC m=+802.993448787" Dec 05 12:38:48 crc kubenswrapper[4784]: I1205 12:38:48.777080 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-xrjl9" Dec 05 12:38:48 crc kubenswrapper[4784]: I1205 12:38:48.777652 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-xrjl9" Dec 05 
12:38:48 crc kubenswrapper[4784]: I1205 12:38:48.838666 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-xrjl9" Dec 05 12:38:49 crc kubenswrapper[4784]: I1205 12:38:49.629782 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-xrjl9" Dec 05 12:38:52 crc kubenswrapper[4784]: I1205 12:38:52.253220 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xrjl9"] Dec 05 12:38:52 crc kubenswrapper[4784]: I1205 12:38:52.253647 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-xrjl9" podUID="cad8d357-acd6-4168-a751-39bafe879ca8" containerName="registry-server" containerID="cri-o://0bc521b467ac33fba46cf94389ffa3094796b2901a67881921daf28a4a8cec66" gracePeriod=2 Dec 05 12:38:52 crc kubenswrapper[4784]: I1205 12:38:52.706835 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-54655dd747-gqpqv"] Dec 05 12:38:52 crc kubenswrapper[4784]: E1205 12:38:52.707386 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3027214-9fa3-4dd0-93f5-b3b316247c73" containerName="pull" Dec 05 12:38:52 crc kubenswrapper[4784]: I1205 12:38:52.707401 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3027214-9fa3-4dd0-93f5-b3b316247c73" containerName="pull" Dec 05 12:38:52 crc kubenswrapper[4784]: E1205 12:38:52.707416 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3027214-9fa3-4dd0-93f5-b3b316247c73" containerName="util" Dec 05 12:38:52 crc kubenswrapper[4784]: I1205 12:38:52.707422 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3027214-9fa3-4dd0-93f5-b3b316247c73" containerName="util" Dec 05 12:38:52 crc kubenswrapper[4784]: E1205 12:38:52.707439 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3027214-9fa3-4dd0-93f5-b3b316247c73" containerName="extract" Dec 05 12:38:52 crc kubenswrapper[4784]: I1205 12:38:52.707446 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3027214-9fa3-4dd0-93f5-b3b316247c73" containerName="extract" Dec 05 12:38:52 crc kubenswrapper[4784]: I1205 12:38:52.707541 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3027214-9fa3-4dd0-93f5-b3b316247c73" containerName="extract" Dec 05 12:38:52 crc kubenswrapper[4784]: I1205 12:38:52.707956 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-54655dd747-gqpqv" Dec 05 12:38:52 crc kubenswrapper[4784]: I1205 12:38:52.710384 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Dec 05 12:38:52 crc kubenswrapper[4784]: I1205 12:38:52.710441 4784 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Dec 05 12:38:52 crc kubenswrapper[4784]: I1205 12:38:52.710534 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Dec 05 12:38:52 crc kubenswrapper[4784]: I1205 12:38:52.711216 4784 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Dec 05 12:38:52 crc kubenswrapper[4784]: I1205 12:38:52.711984 4784 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-kf2tg" Dec 05 12:38:52 crc kubenswrapper[4784]: I1205 12:38:52.754301 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-54655dd747-gqpqv"] Dec 05 12:38:52 crc kubenswrapper[4784]: I1205 12:38:52.837867 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c5573d49-4a27-4dbb-ba09-0a6a3306e365-apiservice-cert\") pod \"metallb-operator-controller-manager-54655dd747-gqpqv\" (UID: \"c5573d49-4a27-4dbb-ba09-0a6a3306e365\") " pod="metallb-system/metallb-operator-controller-manager-54655dd747-gqpqv" Dec 05 12:38:52 crc kubenswrapper[4784]: I1205 12:38:52.837947 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c5573d49-4a27-4dbb-ba09-0a6a3306e365-webhook-cert\") pod \"metallb-operator-controller-manager-54655dd747-gqpqv\" (UID: \"c5573d49-4a27-4dbb-ba09-0a6a3306e365\") " pod="metallb-system/metallb-operator-controller-manager-54655dd747-gqpqv" Dec 05 12:38:52 crc kubenswrapper[4784]: I1205 12:38:52.837972 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p5hmx\" (UniqueName: \"kubernetes.io/projected/c5573d49-4a27-4dbb-ba09-0a6a3306e365-kube-api-access-p5hmx\") pod \"metallb-operator-controller-manager-54655dd747-gqpqv\" (UID: \"c5573d49-4a27-4dbb-ba09-0a6a3306e365\") " pod="metallb-system/metallb-operator-controller-manager-54655dd747-gqpqv" Dec 05 12:38:52 crc kubenswrapper[4784]: I1205 12:38:52.939286 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c5573d49-4a27-4dbb-ba09-0a6a3306e365-apiservice-cert\") pod \"metallb-operator-controller-manager-54655dd747-gqpqv\" (UID: \"c5573d49-4a27-4dbb-ba09-0a6a3306e365\") " pod="metallb-system/metallb-operator-controller-manager-54655dd747-gqpqv" Dec 05 12:38:52 crc kubenswrapper[4784]: I1205 12:38:52.939376 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c5573d49-4a27-4dbb-ba09-0a6a3306e365-webhook-cert\") pod \"metallb-operator-controller-manager-54655dd747-gqpqv\" (UID: \"c5573d49-4a27-4dbb-ba09-0a6a3306e365\") " pod="metallb-system/metallb-operator-controller-manager-54655dd747-gqpqv" Dec 05 12:38:52 crc kubenswrapper[4784]: I1205 12:38:52.939403 4784 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p5hmx\" (UniqueName: \"kubernetes.io/projected/c5573d49-4a27-4dbb-ba09-0a6a3306e365-kube-api-access-p5hmx\") pod \"metallb-operator-controller-manager-54655dd747-gqpqv\" (UID: \"c5573d49-4a27-4dbb-ba09-0a6a3306e365\") " pod="metallb-system/metallb-operator-controller-manager-54655dd747-gqpqv" Dec 05 12:38:52 crc kubenswrapper[4784]: I1205 12:38:52.946029 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c5573d49-4a27-4dbb-ba09-0a6a3306e365-apiservice-cert\") pod \"metallb-operator-controller-manager-54655dd747-gqpqv\" (UID: \"c5573d49-4a27-4dbb-ba09-0a6a3306e365\") " pod="metallb-system/metallb-operator-controller-manager-54655dd747-gqpqv" Dec 05 12:38:52 crc kubenswrapper[4784]: I1205 12:38:52.947104 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c5573d49-4a27-4dbb-ba09-0a6a3306e365-webhook-cert\") pod \"metallb-operator-controller-manager-54655dd747-gqpqv\" (UID: \"c5573d49-4a27-4dbb-ba09-0a6a3306e365\") " pod="metallb-system/metallb-operator-controller-manager-54655dd747-gqpqv" Dec 05 12:38:52 crc kubenswrapper[4784]: I1205 12:38:52.975397 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p5hmx\" (UniqueName: \"kubernetes.io/projected/c5573d49-4a27-4dbb-ba09-0a6a3306e365-kube-api-access-p5hmx\") pod \"metallb-operator-controller-manager-54655dd747-gqpqv\" (UID: \"c5573d49-4a27-4dbb-ba09-0a6a3306e365\") " pod="metallb-system/metallb-operator-controller-manager-54655dd747-gqpqv" Dec 05 12:38:53 crc kubenswrapper[4784]: I1205 12:38:53.026893 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-54655dd747-gqpqv" Dec 05 12:38:53 crc kubenswrapper[4784]: I1205 12:38:53.049803 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-849874cf66-slct4"] Dec 05 12:38:53 crc kubenswrapper[4784]: I1205 12:38:53.050583 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-849874cf66-slct4" Dec 05 12:38:53 crc kubenswrapper[4784]: I1205 12:38:53.053264 4784 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Dec 05 12:38:53 crc kubenswrapper[4784]: I1205 12:38:53.053511 4784 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-qvb8g" Dec 05 12:38:53 crc kubenswrapper[4784]: I1205 12:38:53.054074 4784 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 05 12:38:53 crc kubenswrapper[4784]: I1205 12:38:53.070289 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-849874cf66-slct4"] Dec 05 12:38:53 crc kubenswrapper[4784]: I1205 12:38:53.244758 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/27feea19-4a55-4d86-874a-60b62859a65c-apiservice-cert\") pod \"metallb-operator-webhook-server-849874cf66-slct4\" (UID: \"27feea19-4a55-4d86-874a-60b62859a65c\") " pod="metallb-system/metallb-operator-webhook-server-849874cf66-slct4" Dec 05 12:38:53 crc kubenswrapper[4784]: I1205 12:38:53.245047 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/27feea19-4a55-4d86-874a-60b62859a65c-webhook-cert\") pod \"metallb-operator-webhook-server-849874cf66-slct4\" (UID: \"27feea19-4a55-4d86-874a-60b62859a65c\") " pod="metallb-system/metallb-operator-webhook-server-849874cf66-slct4" Dec 05 12:38:53 crc kubenswrapper[4784]: I1205 12:38:53.245072 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fpr9z\" (UniqueName: \"kubernetes.io/projected/27feea19-4a55-4d86-874a-60b62859a65c-kube-api-access-fpr9z\") pod \"metallb-operator-webhook-server-849874cf66-slct4\" (UID: \"27feea19-4a55-4d86-874a-60b62859a65c\") " pod="metallb-system/metallb-operator-webhook-server-849874cf66-slct4" Dec 05 12:38:53 crc kubenswrapper[4784]: I1205 12:38:53.346158 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/27feea19-4a55-4d86-874a-60b62859a65c-apiservice-cert\") pod \"metallb-operator-webhook-server-849874cf66-slct4\" (UID: \"27feea19-4a55-4d86-874a-60b62859a65c\") " pod="metallb-system/metallb-operator-webhook-server-849874cf66-slct4" Dec 05 12:38:53 crc kubenswrapper[4784]: I1205 12:38:53.346255 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/27feea19-4a55-4d86-874a-60b62859a65c-webhook-cert\") pod \"metallb-operator-webhook-server-849874cf66-slct4\" (UID: \"27feea19-4a55-4d86-874a-60b62859a65c\") " pod="metallb-system/metallb-operator-webhook-server-849874cf66-slct4" Dec 05 12:38:53 crc kubenswrapper[4784]: I1205 12:38:53.346291 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fpr9z\" (UniqueName: \"kubernetes.io/projected/27feea19-4a55-4d86-874a-60b62859a65c-kube-api-access-fpr9z\") pod \"metallb-operator-webhook-server-849874cf66-slct4\" (UID: \"27feea19-4a55-4d86-874a-60b62859a65c\") " pod="metallb-system/metallb-operator-webhook-server-849874cf66-slct4" Dec 05 12:38:53 crc kubenswrapper[4784]: I1205 
12:38:53.351946 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/27feea19-4a55-4d86-874a-60b62859a65c-webhook-cert\") pod \"metallb-operator-webhook-server-849874cf66-slct4\" (UID: \"27feea19-4a55-4d86-874a-60b62859a65c\") " pod="metallb-system/metallb-operator-webhook-server-849874cf66-slct4" Dec 05 12:38:53 crc kubenswrapper[4784]: I1205 12:38:53.358812 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/27feea19-4a55-4d86-874a-60b62859a65c-apiservice-cert\") pod \"metallb-operator-webhook-server-849874cf66-slct4\" (UID: \"27feea19-4a55-4d86-874a-60b62859a65c\") " pod="metallb-system/metallb-operator-webhook-server-849874cf66-slct4" Dec 05 12:38:53 crc kubenswrapper[4784]: I1205 12:38:53.369424 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fpr9z\" (UniqueName: \"kubernetes.io/projected/27feea19-4a55-4d86-874a-60b62859a65c-kube-api-access-fpr9z\") pod \"metallb-operator-webhook-server-849874cf66-slct4\" (UID: \"27feea19-4a55-4d86-874a-60b62859a65c\") " pod="metallb-system/metallb-operator-webhook-server-849874cf66-slct4" Dec 05 12:38:53 crc kubenswrapper[4784]: I1205 12:38:53.417607 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-849874cf66-slct4" Dec 05 12:38:53 crc kubenswrapper[4784]: I1205 12:38:53.527548 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-54655dd747-gqpqv"] Dec 05 12:38:53 crc kubenswrapper[4784]: I1205 12:38:53.730461 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-849874cf66-slct4"] Dec 05 12:38:53 crc kubenswrapper[4784]: W1205 12:38:53.804502 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod27feea19_4a55_4d86_874a_60b62859a65c.slice/crio-b5b7f8138256f421cc06bcdfa4d1a48eb67996f1fbb14b753184858a8a159fe3 WatchSource:0}: Error finding container b5b7f8138256f421cc06bcdfa4d1a48eb67996f1fbb14b753184858a8a159fe3: Status 404 returned error can't find the container with id b5b7f8138256f421cc06bcdfa4d1a48eb67996f1fbb14b753184858a8a159fe3 Dec 05 12:38:54 crc kubenswrapper[4784]: I1205 12:38:54.472122 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-xrjl9" Dec 05 12:38:54 crc kubenswrapper[4784]: I1205 12:38:54.624753 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-54655dd747-gqpqv" event={"ID":"c5573d49-4a27-4dbb-ba09-0a6a3306e365","Type":"ContainerStarted","Data":"4334749b9edfc80a3ee1025fa094101145a8f600364e94cc3afa5d886782be7d"} Dec 05 12:38:54 crc kubenswrapper[4784]: I1205 12:38:54.627006 4784 generic.go:334] "Generic (PLEG): container finished" podID="cad8d357-acd6-4168-a751-39bafe879ca8" containerID="0bc521b467ac33fba46cf94389ffa3094796b2901a67881921daf28a4a8cec66" exitCode=0 Dec 05 12:38:54 crc kubenswrapper[4784]: I1205 12:38:54.627050 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xrjl9" event={"ID":"cad8d357-acd6-4168-a751-39bafe879ca8","Type":"ContainerDied","Data":"0bc521b467ac33fba46cf94389ffa3094796b2901a67881921daf28a4a8cec66"} Dec 05 12:38:54 crc kubenswrapper[4784]: I1205 12:38:54.627076 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xrjl9" Dec 05 12:38:54 crc kubenswrapper[4784]: I1205 12:38:54.627083 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xrjl9" event={"ID":"cad8d357-acd6-4168-a751-39bafe879ca8","Type":"ContainerDied","Data":"9dc157edb93ec6127aa9113c2ae47baf03508930c09099b01c664192c1527a18"} Dec 05 12:38:54 crc kubenswrapper[4784]: I1205 12:38:54.627107 4784 scope.go:117] "RemoveContainer" containerID="0bc521b467ac33fba46cf94389ffa3094796b2901a67881921daf28a4a8cec66" Dec 05 12:38:54 crc kubenswrapper[4784]: I1205 12:38:54.628462 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-849874cf66-slct4" event={"ID":"27feea19-4a55-4d86-874a-60b62859a65c","Type":"ContainerStarted","Data":"b5b7f8138256f421cc06bcdfa4d1a48eb67996f1fbb14b753184858a8a159fe3"} Dec 05 12:38:54 crc kubenswrapper[4784]: I1205 12:38:54.646079 4784 scope.go:117] "RemoveContainer" containerID="d87abb781aed73406968b0c17833544d29ce754c6362d0b90c2db595f4eefd7c" Dec 05 12:38:54 crc kubenswrapper[4784]: I1205 12:38:54.661977 4784 scope.go:117] "RemoveContainer" containerID="1711b0c648e32ec554631855aca0e9515a65eacd27c39351550226b801d1c81f" Dec 05 12:38:54 crc kubenswrapper[4784]: I1205 12:38:54.662388 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cad8d357-acd6-4168-a751-39bafe879ca8-utilities\") pod \"cad8d357-acd6-4168-a751-39bafe879ca8\" (UID: \"cad8d357-acd6-4168-a751-39bafe879ca8\") " Dec 05 12:38:54 crc kubenswrapper[4784]: I1205 12:38:54.662490 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ml7pm\" (UniqueName: \"kubernetes.io/projected/cad8d357-acd6-4168-a751-39bafe879ca8-kube-api-access-ml7pm\") pod \"cad8d357-acd6-4168-a751-39bafe879ca8\" (UID: \"cad8d357-acd6-4168-a751-39bafe879ca8\") " Dec 05 12:38:54 crc kubenswrapper[4784]: I1205 12:38:54.662571 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cad8d357-acd6-4168-a751-39bafe879ca8-catalog-content\") pod \"cad8d357-acd6-4168-a751-39bafe879ca8\" (UID: \"cad8d357-acd6-4168-a751-39bafe879ca8\") " Dec 05 12:38:54 crc kubenswrapper[4784]: I1205 12:38:54.663737 4784 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cad8d357-acd6-4168-a751-39bafe879ca8-utilities" (OuterVolumeSpecName: "utilities") pod "cad8d357-acd6-4168-a751-39bafe879ca8" (UID: "cad8d357-acd6-4168-a751-39bafe879ca8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:38:54 crc kubenswrapper[4784]: I1205 12:38:54.668376 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cad8d357-acd6-4168-a751-39bafe879ca8-kube-api-access-ml7pm" (OuterVolumeSpecName: "kube-api-access-ml7pm") pod "cad8d357-acd6-4168-a751-39bafe879ca8" (UID: "cad8d357-acd6-4168-a751-39bafe879ca8"). InnerVolumeSpecName "kube-api-access-ml7pm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:38:54 crc kubenswrapper[4784]: I1205 12:38:54.696075 4784 scope.go:117] "RemoveContainer" containerID="0bc521b467ac33fba46cf94389ffa3094796b2901a67881921daf28a4a8cec66" Dec 05 12:38:54 crc kubenswrapper[4784]: E1205 12:38:54.696618 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0bc521b467ac33fba46cf94389ffa3094796b2901a67881921daf28a4a8cec66\": container with ID starting with 0bc521b467ac33fba46cf94389ffa3094796b2901a67881921daf28a4a8cec66 not found: ID does not exist" containerID="0bc521b467ac33fba46cf94389ffa3094796b2901a67881921daf28a4a8cec66" Dec 05 12:38:54 crc kubenswrapper[4784]: I1205 12:38:54.696662 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0bc521b467ac33fba46cf94389ffa3094796b2901a67881921daf28a4a8cec66"} err="failed to get container status \"0bc521b467ac33fba46cf94389ffa3094796b2901a67881921daf28a4a8cec66\": rpc error: code = NotFound desc = could not find container \"0bc521b467ac33fba46cf94389ffa3094796b2901a67881921daf28a4a8cec66\": container with ID starting with 0bc521b467ac33fba46cf94389ffa3094796b2901a67881921daf28a4a8cec66 not found: ID does not exist" Dec 05 12:38:54 crc kubenswrapper[4784]: I1205 12:38:54.696687 4784 scope.go:117] "RemoveContainer" containerID="d87abb781aed73406968b0c17833544d29ce754c6362d0b90c2db595f4eefd7c" Dec 05 12:38:54 crc kubenswrapper[4784]: E1205 12:38:54.697034 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d87abb781aed73406968b0c17833544d29ce754c6362d0b90c2db595f4eefd7c\": container with ID starting with d87abb781aed73406968b0c17833544d29ce754c6362d0b90c2db595f4eefd7c not found: ID does not exist" containerID="d87abb781aed73406968b0c17833544d29ce754c6362d0b90c2db595f4eefd7c" Dec 05 12:38:54 crc kubenswrapper[4784]: I1205 12:38:54.697062 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d87abb781aed73406968b0c17833544d29ce754c6362d0b90c2db595f4eefd7c"} err="failed to get container status \"d87abb781aed73406968b0c17833544d29ce754c6362d0b90c2db595f4eefd7c\": rpc error: code = NotFound desc = could not find container \"d87abb781aed73406968b0c17833544d29ce754c6362d0b90c2db595f4eefd7c\": container with ID starting with d87abb781aed73406968b0c17833544d29ce754c6362d0b90c2db595f4eefd7c not found: ID does not exist" Dec 05 12:38:54 crc kubenswrapper[4784]: I1205 12:38:54.697080 4784 scope.go:117] "RemoveContainer" containerID="1711b0c648e32ec554631855aca0e9515a65eacd27c39351550226b801d1c81f" Dec 05 12:38:54 crc kubenswrapper[4784]: E1205 12:38:54.697393 4784 log.go:32] "ContainerStatus from 
runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1711b0c648e32ec554631855aca0e9515a65eacd27c39351550226b801d1c81f\": container with ID starting with 1711b0c648e32ec554631855aca0e9515a65eacd27c39351550226b801d1c81f not found: ID does not exist" containerID="1711b0c648e32ec554631855aca0e9515a65eacd27c39351550226b801d1c81f" Dec 05 12:38:54 crc kubenswrapper[4784]: I1205 12:38:54.697421 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1711b0c648e32ec554631855aca0e9515a65eacd27c39351550226b801d1c81f"} err="failed to get container status \"1711b0c648e32ec554631855aca0e9515a65eacd27c39351550226b801d1c81f\": rpc error: code = NotFound desc = could not find container \"1711b0c648e32ec554631855aca0e9515a65eacd27c39351550226b801d1c81f\": container with ID starting with 1711b0c648e32ec554631855aca0e9515a65eacd27c39351550226b801d1c81f not found: ID does not exist" Dec 05 12:38:54 crc kubenswrapper[4784]: I1205 12:38:54.763896 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ml7pm\" (UniqueName: \"kubernetes.io/projected/cad8d357-acd6-4168-a751-39bafe879ca8-kube-api-access-ml7pm\") on node \"crc\" DevicePath \"\"" Dec 05 12:38:54 crc kubenswrapper[4784]: I1205 12:38:54.763930 4784 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cad8d357-acd6-4168-a751-39bafe879ca8-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:38:54 crc kubenswrapper[4784]: I1205 12:38:54.764654 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cad8d357-acd6-4168-a751-39bafe879ca8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cad8d357-acd6-4168-a751-39bafe879ca8" (UID: "cad8d357-acd6-4168-a751-39bafe879ca8"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:38:54 crc kubenswrapper[4784]: I1205 12:38:54.865385 4784 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cad8d357-acd6-4168-a751-39bafe879ca8-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:38:54 crc kubenswrapper[4784]: I1205 12:38:54.957421 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xrjl9"] Dec 05 12:38:54 crc kubenswrapper[4784]: I1205 12:38:54.962648 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-xrjl9"] Dec 05 12:38:55 crc kubenswrapper[4784]: I1205 12:38:55.013688 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cad8d357-acd6-4168-a751-39bafe879ca8" path="/var/lib/kubelet/pods/cad8d357-acd6-4168-a751-39bafe879ca8/volumes" Dec 05 12:38:59 crc kubenswrapper[4784]: I1205 12:38:59.677511 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-54655dd747-gqpqv" event={"ID":"c5573d49-4a27-4dbb-ba09-0a6a3306e365","Type":"ContainerStarted","Data":"422e5ab7ffe1236285a099008131df0811ce4c57f5211e8d9b3c9d0442e35da2"} Dec 05 12:38:59 crc kubenswrapper[4784]: I1205 12:38:59.678065 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-54655dd747-gqpqv" Dec 05 12:38:59 crc kubenswrapper[4784]: I1205 12:38:59.678946 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-849874cf66-slct4" event={"ID":"27feea19-4a55-4d86-874a-60b62859a65c","Type":"ContainerStarted","Data":"d5b44405f59c64142d00a27103add2eafc0f4bd4c61012bb06e47e1ecd7b06ec"} Dec 05 12:38:59 crc kubenswrapper[4784]: I1205 12:38:59.679146 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-849874cf66-slct4" Dec 05 12:38:59 crc kubenswrapper[4784]: I1205 12:38:59.701535 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-54655dd747-gqpqv" podStartSLOduration=2.64211661 podStartE2EDuration="7.701510354s" podCreationTimestamp="2025-12-05 12:38:52 +0000 UTC" firstStartedPulling="2025-12-05 12:38:53.711435645 +0000 UTC m=+813.131502460" lastFinishedPulling="2025-12-05 12:38:58.770829389 +0000 UTC m=+818.190896204" observedRunningTime="2025-12-05 12:38:59.697506539 +0000 UTC m=+819.117573364" watchObservedRunningTime="2025-12-05 12:38:59.701510354 +0000 UTC m=+819.121577179" Dec 05 12:38:59 crc kubenswrapper[4784]: I1205 12:38:59.727462 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-849874cf66-slct4" podStartSLOduration=1.7418784729999999 podStartE2EDuration="6.727439328s" podCreationTimestamp="2025-12-05 12:38:53 +0000 UTC" firstStartedPulling="2025-12-05 12:38:53.807526762 +0000 UTC m=+813.227593577" lastFinishedPulling="2025-12-05 12:38:58.793087617 +0000 UTC m=+818.213154432" observedRunningTime="2025-12-05 12:38:59.720541322 +0000 UTC m=+819.140608157" watchObservedRunningTime="2025-12-05 12:38:59.727439328 +0000 UTC m=+819.147506153" Dec 05 12:39:13 crc kubenswrapper[4784]: I1205 12:39:13.423466 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-849874cf66-slct4" Dec 05 12:39:33 crc 
kubenswrapper[4784]: I1205 12:39:33.031285 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-54655dd747-gqpqv" Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.759947 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-xjbhc"] Dec 05 12:39:33 crc kubenswrapper[4784]: E1205 12:39:33.760399 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cad8d357-acd6-4168-a751-39bafe879ca8" containerName="registry-server" Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.760416 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="cad8d357-acd6-4168-a751-39bafe879ca8" containerName="registry-server" Dec 05 12:39:33 crc kubenswrapper[4784]: E1205 12:39:33.760437 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cad8d357-acd6-4168-a751-39bafe879ca8" containerName="extract-utilities" Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.760443 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="cad8d357-acd6-4168-a751-39bafe879ca8" containerName="extract-utilities" Dec 05 12:39:33 crc kubenswrapper[4784]: E1205 12:39:33.760451 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cad8d357-acd6-4168-a751-39bafe879ca8" containerName="extract-content" Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.760458 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="cad8d357-acd6-4168-a751-39bafe879ca8" containerName="extract-content" Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.760587 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="cad8d357-acd6-4168-a751-39bafe879ca8" containerName="registry-server" Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.762976 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-xjbhc" Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.769263 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-956ff"] Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.770326 4784 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.770344 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.770736 4784 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-mgwsk" Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.771873 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-956ff"
Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.774669 4784 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert"
Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.785983 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-956ff"]
Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.789636 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/45594d33-f8da-48e8-b9c2-d60c96a98f64-frr-startup\") pod \"frr-k8s-xjbhc\" (UID: \"45594d33-f8da-48e8-b9c2-d60c96a98f64\") " pod="metallb-system/frr-k8s-xjbhc"
Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.789713 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/45594d33-f8da-48e8-b9c2-d60c96a98f64-reloader\") pod \"frr-k8s-xjbhc\" (UID: \"45594d33-f8da-48e8-b9c2-d60c96a98f64\") " pod="metallb-system/frr-k8s-xjbhc"
Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.789737 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/45594d33-f8da-48e8-b9c2-d60c96a98f64-frr-sockets\") pod \"frr-k8s-xjbhc\" (UID: \"45594d33-f8da-48e8-b9c2-d60c96a98f64\") " pod="metallb-system/frr-k8s-xjbhc"
Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.789759 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/45594d33-f8da-48e8-b9c2-d60c96a98f64-metrics-certs\") pod \"frr-k8s-xjbhc\" (UID: \"45594d33-f8da-48e8-b9c2-d60c96a98f64\") " pod="metallb-system/frr-k8s-xjbhc"
Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.789785 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/45594d33-f8da-48e8-b9c2-d60c96a98f64-metrics\") pod \"frr-k8s-xjbhc\" (UID: \"45594d33-f8da-48e8-b9c2-d60c96a98f64\") " pod="metallb-system/frr-k8s-xjbhc"
Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.789810 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/45594d33-f8da-48e8-b9c2-d60c96a98f64-frr-conf\") pod \"frr-k8s-xjbhc\" (UID: \"45594d33-f8da-48e8-b9c2-d60c96a98f64\") " pod="metallb-system/frr-k8s-xjbhc"
Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.789833 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tbghh\" (UniqueName: \"kubernetes.io/projected/45594d33-f8da-48e8-b9c2-d60c96a98f64-kube-api-access-tbghh\") pod \"frr-k8s-xjbhc\" (UID: \"45594d33-f8da-48e8-b9c2-d60c96a98f64\") " pod="metallb-system/frr-k8s-xjbhc"
Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.891058 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/45594d33-f8da-48e8-b9c2-d60c96a98f64-metrics\") pod \"frr-k8s-xjbhc\" (UID: \"45594d33-f8da-48e8-b9c2-d60c96a98f64\") " pod="metallb-system/frr-k8s-xjbhc"
Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.891113 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/45594d33-f8da-48e8-b9c2-d60c96a98f64-frr-conf\") pod \"frr-k8s-xjbhc\" (UID: \"45594d33-f8da-48e8-b9c2-d60c96a98f64\") " pod="metallb-system/frr-k8s-xjbhc"
Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.891134 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tbghh\" (UniqueName: \"kubernetes.io/projected/45594d33-f8da-48e8-b9c2-d60c96a98f64-kube-api-access-tbghh\") pod \"frr-k8s-xjbhc\" (UID: \"45594d33-f8da-48e8-b9c2-d60c96a98f64\") " pod="metallb-system/frr-k8s-xjbhc"
Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.891161 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g2r5h\" (UniqueName: \"kubernetes.io/projected/a65c75b7-3183-4839-a70e-d16e4776e89d-kube-api-access-g2r5h\") pod \"frr-k8s-webhook-server-7fcb986d4-956ff\" (UID: \"a65c75b7-3183-4839-a70e-d16e4776e89d\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-956ff"
Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.891224 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/45594d33-f8da-48e8-b9c2-d60c96a98f64-frr-startup\") pod \"frr-k8s-xjbhc\" (UID: \"45594d33-f8da-48e8-b9c2-d60c96a98f64\") " pod="metallb-system/frr-k8s-xjbhc"
Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.891254 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/45594d33-f8da-48e8-b9c2-d60c96a98f64-reloader\") pod \"frr-k8s-xjbhc\" (UID: \"45594d33-f8da-48e8-b9c2-d60c96a98f64\") " pod="metallb-system/frr-k8s-xjbhc"
Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.891271 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/45594d33-f8da-48e8-b9c2-d60c96a98f64-frr-sockets\") pod \"frr-k8s-xjbhc\" (UID: \"45594d33-f8da-48e8-b9c2-d60c96a98f64\") " pod="metallb-system/frr-k8s-xjbhc"
Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.891292 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/45594d33-f8da-48e8-b9c2-d60c96a98f64-metrics-certs\") pod \"frr-k8s-xjbhc\" (UID: \"45594d33-f8da-48e8-b9c2-d60c96a98f64\") " pod="metallb-system/frr-k8s-xjbhc"
Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.891313 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a65c75b7-3183-4839-a70e-d16e4776e89d-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-956ff\" (UID: \"a65c75b7-3183-4839-a70e-d16e4776e89d\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-956ff"
Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.891549 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/45594d33-f8da-48e8-b9c2-d60c96a98f64-frr-conf\") pod \"frr-k8s-xjbhc\" (UID: \"45594d33-f8da-48e8-b9c2-d60c96a98f64\") " pod="metallb-system/frr-k8s-xjbhc"
Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.891742 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/45594d33-f8da-48e8-b9c2-d60c96a98f64-metrics\") pod \"frr-k8s-xjbhc\" (UID: \"45594d33-f8da-48e8-b9c2-d60c96a98f64\") " pod="metallb-system/frr-k8s-xjbhc"
Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.891811 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/45594d33-f8da-48e8-b9c2-d60c96a98f64-frr-sockets\") pod \"frr-k8s-xjbhc\" (UID: \"45594d33-f8da-48e8-b9c2-d60c96a98f64\") " pod="metallb-system/frr-k8s-xjbhc"
Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.892211 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/45594d33-f8da-48e8-b9c2-d60c96a98f64-reloader\") pod \"frr-k8s-xjbhc\" (UID: \"45594d33-f8da-48e8-b9c2-d60c96a98f64\") " pod="metallb-system/frr-k8s-xjbhc"
Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.892386 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/45594d33-f8da-48e8-b9c2-d60c96a98f64-frr-startup\") pod \"frr-k8s-xjbhc\" (UID: \"45594d33-f8da-48e8-b9c2-d60c96a98f64\") " pod="metallb-system/frr-k8s-xjbhc"
Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.896114 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-cffk7"]
Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.897161 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-cffk7"
Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.904032 4784 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist"
Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.904366 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/45594d33-f8da-48e8-b9c2-d60c96a98f64-metrics-certs\") pod \"frr-k8s-xjbhc\" (UID: \"45594d33-f8da-48e8-b9c2-d60c96a98f64\") " pod="metallb-system/frr-k8s-xjbhc"
Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.908939 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2"
Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.909161 4784 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-6vx74"
Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.909307 4784 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret"
Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.920285 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-f8648f98b-jgfs9"]
Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.921235 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-f8648f98b-jgfs9"
Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.924451 4784 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret"
Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.929075 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tbghh\" (UniqueName: \"kubernetes.io/projected/45594d33-f8da-48e8-b9c2-d60c96a98f64-kube-api-access-tbghh\") pod \"frr-k8s-xjbhc\" (UID: \"45594d33-f8da-48e8-b9c2-d60c96a98f64\") " pod="metallb-system/frr-k8s-xjbhc"
Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.955471 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-jgfs9"]
Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.993299 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6bb837d7-0a54-4a2c-a943-70838b7b3d58-cert\") pod \"controller-f8648f98b-jgfs9\" (UID: \"6bb837d7-0a54-4a2c-a943-70838b7b3d58\") " pod="metallb-system/controller-f8648f98b-jgfs9"
Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.993362 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f4d31217-a7d3-490f-8bba-c9d8ca4c47ca-metrics-certs\") pod \"speaker-cffk7\" (UID: \"f4d31217-a7d3-490f-8bba-c9d8ca4c47ca\") " pod="metallb-system/speaker-cffk7"
Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.993393 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/f4d31217-a7d3-490f-8bba-c9d8ca4c47ca-metallb-excludel2\") pod \"speaker-cffk7\" (UID: \"f4d31217-a7d3-490f-8bba-c9d8ca4c47ca\") " pod="metallb-system/speaker-cffk7"
Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.993412 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmqm4\" (UniqueName: \"kubernetes.io/projected/f4d31217-a7d3-490f-8bba-c9d8ca4c47ca-kube-api-access-xmqm4\") pod \"speaker-cffk7\" (UID: \"f4d31217-a7d3-490f-8bba-c9d8ca4c47ca\") " pod="metallb-system/speaker-cffk7"
Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.993442 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7vbw\" (UniqueName: \"kubernetes.io/projected/6bb837d7-0a54-4a2c-a943-70838b7b3d58-kube-api-access-w7vbw\") pod \"controller-f8648f98b-jgfs9\" (UID: \"6bb837d7-0a54-4a2c-a943-70838b7b3d58\") " pod="metallb-system/controller-f8648f98b-jgfs9"
Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.993474 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a65c75b7-3183-4839-a70e-d16e4776e89d-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-956ff\" (UID: \"a65c75b7-3183-4839-a70e-d16e4776e89d\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-956ff"
Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.993511 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g2r5h\" (UniqueName: \"kubernetes.io/projected/a65c75b7-3183-4839-a70e-d16e4776e89d-kube-api-access-g2r5h\") pod \"frr-k8s-webhook-server-7fcb986d4-956ff\" (UID: \"a65c75b7-3183-4839-a70e-d16e4776e89d\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-956ff"
Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.993537 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6bb837d7-0a54-4a2c-a943-70838b7b3d58-metrics-certs\") pod \"controller-f8648f98b-jgfs9\" (UID: \"6bb837d7-0a54-4a2c-a943-70838b7b3d58\") " pod="metallb-system/controller-f8648f98b-jgfs9"
Dec 05 12:39:33 crc kubenswrapper[4784]: I1205 12:39:33.993557 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/f4d31217-a7d3-490f-8bba-c9d8ca4c47ca-memberlist\") pod \"speaker-cffk7\" (UID: \"f4d31217-a7d3-490f-8bba-c9d8ca4c47ca\") " pod="metallb-system/speaker-cffk7"
Dec 05 12:39:34 crc kubenswrapper[4784]: I1205 12:39:34.005954 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a65c75b7-3183-4839-a70e-d16e4776e89d-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-956ff\" (UID: \"a65c75b7-3183-4839-a70e-d16e4776e89d\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-956ff"
Dec 05 12:39:34 crc kubenswrapper[4784]: I1205 12:39:34.016029 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g2r5h\" (UniqueName: \"kubernetes.io/projected/a65c75b7-3183-4839-a70e-d16e4776e89d-kube-api-access-g2r5h\") pod \"frr-k8s-webhook-server-7fcb986d4-956ff\" (UID: \"a65c75b7-3183-4839-a70e-d16e4776e89d\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-956ff"
Dec 05 12:39:34 crc kubenswrapper[4784]: I1205 12:39:34.088838 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-xjbhc"
Dec 05 12:39:34 crc kubenswrapper[4784]: I1205 12:39:34.095261 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6bb837d7-0a54-4a2c-a943-70838b7b3d58-metrics-certs\") pod \"controller-f8648f98b-jgfs9\" (UID: \"6bb837d7-0a54-4a2c-a943-70838b7b3d58\") " pod="metallb-system/controller-f8648f98b-jgfs9"
Dec 05 12:39:34 crc kubenswrapper[4784]: I1205 12:39:34.095345 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/f4d31217-a7d3-490f-8bba-c9d8ca4c47ca-memberlist\") pod \"speaker-cffk7\" (UID: \"f4d31217-a7d3-490f-8bba-c9d8ca4c47ca\") " pod="metallb-system/speaker-cffk7"
Dec 05 12:39:34 crc kubenswrapper[4784]: I1205 12:39:34.095389 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6bb837d7-0a54-4a2c-a943-70838b7b3d58-cert\") pod \"controller-f8648f98b-jgfs9\" (UID: \"6bb837d7-0a54-4a2c-a943-70838b7b3d58\") " pod="metallb-system/controller-f8648f98b-jgfs9"
Dec 05 12:39:34 crc kubenswrapper[4784]: I1205 12:39:34.095418 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f4d31217-a7d3-490f-8bba-c9d8ca4c47ca-metrics-certs\") pod \"speaker-cffk7\" (UID: \"f4d31217-a7d3-490f-8bba-c9d8ca4c47ca\") " pod="metallb-system/speaker-cffk7"
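Annotation: the reconciler entries above repeat a fixed per-volume progression: operationExecutor.VerifyControllerAttachedVolume (reconciler_common.go:245), then operationExecutor.MountVolume started (reconciler_common.go:218), then MountVolume.SetUp succeeded (operation_generator.go:637). The kubelet volume manager drives this by diffing a desired state of world against an actual state of world on each pass. A minimal Go sketch of that shape; every name below is illustrative rather than kubelet's actual API (the real code lives in pkg/kubelet/volumemanager and is considerably more involved):

package main

import "fmt"

type volumeState int

const (
	verified volumeState = iota // VerifyControllerAttachedVolume passed
	mounting                    // MountVolume started
	mounted                     // MountVolume.SetUp succeeded
)

// reconcile advances every desired volume one step toward mounted,
// mimicking one pass of a desired-vs-actual reconciliation loop.
func reconcile(desired []string, actual map[string]volumeState) {
	for _, vol := range desired {
		switch actual[vol] {
		case verified:
			fmt.Printf("operationExecutor.MountVolume started for volume %q\n", vol)
			actual[vol] = mounting
		case mounting:
			fmt.Printf("MountVolume.SetUp succeeded for volume %q\n", vol)
			actual[vol] = mounted
		}
	}
}

func main() {
	// Volumes of pod frr-k8s-xjbhc, taken from the entries above.
	desired := []string{"frr-startup", "reloader", "frr-sockets", "metrics-certs", "metrics", "frr-conf"}
	actual := map[string]volumeState{} // missing key reads as verified in this sketch
	reconcile(desired, actual)         // first pass: mounts start
	reconcile(desired, actual)         // second pass: mounts complete
}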
Dec 05 12:39:34 crc kubenswrapper[4784]: I1205 12:39:34.095441 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/f4d31217-a7d3-490f-8bba-c9d8ca4c47ca-metallb-excludel2\") pod \"speaker-cffk7\" (UID: \"f4d31217-a7d3-490f-8bba-c9d8ca4c47ca\") " pod="metallb-system/speaker-cffk7"
Dec 05 12:39:34 crc kubenswrapper[4784]: I1205 12:39:34.095464 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xmqm4\" (UniqueName: \"kubernetes.io/projected/f4d31217-a7d3-490f-8bba-c9d8ca4c47ca-kube-api-access-xmqm4\") pod \"speaker-cffk7\" (UID: \"f4d31217-a7d3-490f-8bba-c9d8ca4c47ca\") " pod="metallb-system/speaker-cffk7"
Dec 05 12:39:34 crc kubenswrapper[4784]: E1205 12:39:34.095485 4784 secret.go:188] Couldn't get secret metallb-system/controller-certs-secret: secret "controller-certs-secret" not found
Dec 05 12:39:34 crc kubenswrapper[4784]: E1205 12:39:34.095594 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6bb837d7-0a54-4a2c-a943-70838b7b3d58-metrics-certs podName:6bb837d7-0a54-4a2c-a943-70838b7b3d58 nodeName:}" failed. No retries permitted until 2025-12-05 12:39:34.595568505 +0000 UTC m=+854.015635320 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/6bb837d7-0a54-4a2c-a943-70838b7b3d58-metrics-certs") pod "controller-f8648f98b-jgfs9" (UID: "6bb837d7-0a54-4a2c-a943-70838b7b3d58") : secret "controller-certs-secret" not found
Dec 05 12:39:34 crc kubenswrapper[4784]: I1205 12:39:34.095495 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7vbw\" (UniqueName: \"kubernetes.io/projected/6bb837d7-0a54-4a2c-a943-70838b7b3d58-kube-api-access-w7vbw\") pod \"controller-f8648f98b-jgfs9\" (UID: \"6bb837d7-0a54-4a2c-a943-70838b7b3d58\") " pod="metallb-system/controller-f8648f98b-jgfs9"
Dec 05 12:39:34 crc kubenswrapper[4784]: E1205 12:39:34.096677 4784 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found
Dec 05 12:39:34 crc kubenswrapper[4784]: E1205 12:39:34.096715 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f4d31217-a7d3-490f-8bba-c9d8ca4c47ca-memberlist podName:f4d31217-a7d3-490f-8bba-c9d8ca4c47ca nodeName:}" failed. No retries permitted until 2025-12-05 12:39:34.596705991 +0000 UTC m=+854.016772806 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/f4d31217-a7d3-490f-8bba-c9d8ca4c47ca-memberlist") pod "speaker-cffk7" (UID: "f4d31217-a7d3-490f-8bba-c9d8ca4c47ca") : secret "metallb-memberlist" not found
Dec 05 12:39:34 crc kubenswrapper[4784]: I1205 12:39:34.097101 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-956ff"
Dec 05 12:39:34 crc kubenswrapper[4784]: I1205 12:39:34.097366 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/f4d31217-a7d3-490f-8bba-c9d8ca4c47ca-metallb-excludel2\") pod \"speaker-cffk7\" (UID: \"f4d31217-a7d3-490f-8bba-c9d8ca4c47ca\") " pod="metallb-system/speaker-cffk7"
Dec 05 12:39:34 crc kubenswrapper[4784]: I1205 12:39:34.102511 4784 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert"
Dec 05 12:39:34 crc kubenswrapper[4784]: I1205 12:39:34.103262 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f4d31217-a7d3-490f-8bba-c9d8ca4c47ca-metrics-certs\") pod \"speaker-cffk7\" (UID: \"f4d31217-a7d3-490f-8bba-c9d8ca4c47ca\") " pod="metallb-system/speaker-cffk7"
Dec 05 12:39:34 crc kubenswrapper[4784]: I1205 12:39:34.114066 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6bb837d7-0a54-4a2c-a943-70838b7b3d58-cert\") pod \"controller-f8648f98b-jgfs9\" (UID: \"6bb837d7-0a54-4a2c-a943-70838b7b3d58\") " pod="metallb-system/controller-f8648f98b-jgfs9"
Dec 05 12:39:34 crc kubenswrapper[4784]: I1205 12:39:34.125482 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xmqm4\" (UniqueName: \"kubernetes.io/projected/f4d31217-a7d3-490f-8bba-c9d8ca4c47ca-kube-api-access-xmqm4\") pod \"speaker-cffk7\" (UID: \"f4d31217-a7d3-490f-8bba-c9d8ca4c47ca\") " pod="metallb-system/speaker-cffk7"
Dec 05 12:39:34 crc kubenswrapper[4784]: I1205 12:39:34.127505 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7vbw\" (UniqueName: \"kubernetes.io/projected/6bb837d7-0a54-4a2c-a943-70838b7b3d58-kube-api-access-w7vbw\") pod \"controller-f8648f98b-jgfs9\" (UID: \"6bb837d7-0a54-4a2c-a943-70838b7b3d58\") " pod="metallb-system/controller-f8648f98b-jgfs9"
Dec 05 12:39:34 crc kubenswrapper[4784]: I1205 12:39:34.326581 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-956ff"]
Dec 05 12:39:34 crc kubenswrapper[4784]: I1205 12:39:34.605292 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6bb837d7-0a54-4a2c-a943-70838b7b3d58-metrics-certs\") pod \"controller-f8648f98b-jgfs9\" (UID: \"6bb837d7-0a54-4a2c-a943-70838b7b3d58\") " pod="metallb-system/controller-f8648f98b-jgfs9"
Dec 05 12:39:34 crc kubenswrapper[4784]: I1205 12:39:34.605346 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/f4d31217-a7d3-490f-8bba-c9d8ca4c47ca-memberlist\") pod \"speaker-cffk7\" (UID: \"f4d31217-a7d3-490f-8bba-c9d8ca4c47ca\") " pod="metallb-system/speaker-cffk7"
Dec 05 12:39:34 crc kubenswrapper[4784]: E1205 12:39:34.605516 4784 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found
Dec 05 12:39:34 crc kubenswrapper[4784]: E1205 12:39:34.605604 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f4d31217-a7d3-490f-8bba-c9d8ca4c47ca-memberlist podName:f4d31217-a7d3-490f-8bba-c9d8ca4c47ca nodeName:}" failed. No retries permitted until 2025-12-05 12:39:35.605583778 +0000 UTC m=+855.025650593 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/f4d31217-a7d3-490f-8bba-c9d8ca4c47ca-memberlist") pod "speaker-cffk7" (UID: "f4d31217-a7d3-490f-8bba-c9d8ca4c47ca") : secret "metallb-memberlist" not found
Dec 05 12:39:34 crc kubenswrapper[4784]: I1205 12:39:34.613164 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6bb837d7-0a54-4a2c-a943-70838b7b3d58-metrics-certs\") pod \"controller-f8648f98b-jgfs9\" (UID: \"6bb837d7-0a54-4a2c-a943-70838b7b3d58\") " pod="metallb-system/controller-f8648f98b-jgfs9"
Dec 05 12:39:34 crc kubenswrapper[4784]: I1205 12:39:34.651705 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-f8648f98b-jgfs9"
Dec 05 12:39:34 crc kubenswrapper[4784]: I1205 12:39:34.843872 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-jgfs9"]
Dec 05 12:39:34 crc kubenswrapper[4784]: I1205 12:39:34.912350 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-jgfs9" event={"ID":"6bb837d7-0a54-4a2c-a943-70838b7b3d58","Type":"ContainerStarted","Data":"484a20f9424c1d75e456746239875967b80761fd61cb3f0fd4e0b22fca5591f6"}
Dec 05 12:39:34 crc kubenswrapper[4784]: I1205 12:39:34.914035 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-956ff" event={"ID":"a65c75b7-3183-4839-a70e-d16e4776e89d","Type":"ContainerStarted","Data":"e5e95274d122a69a06d92bdf9a8c42667b8c12353a4a8fe292d06eab2f24c163"}
Dec 05 12:39:34 crc kubenswrapper[4784]: I1205 12:39:34.915588 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xjbhc" event={"ID":"45594d33-f8da-48e8-b9c2-d60c96a98f64","Type":"ContainerStarted","Data":"e64ddc11f87a4608d259b83a536bc6c01248b59baba9071e518ec7d318b412f4"}
Dec 05 12:39:35 crc kubenswrapper[4784]: I1205 12:39:35.618865 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/f4d31217-a7d3-490f-8bba-c9d8ca4c47ca-memberlist\") pod \"speaker-cffk7\" (UID: \"f4d31217-a7d3-490f-8bba-c9d8ca4c47ca\") " pod="metallb-system/speaker-cffk7"
Dec 05 12:39:35 crc kubenswrapper[4784]: I1205 12:39:35.624579 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/f4d31217-a7d3-490f-8bba-c9d8ca4c47ca-memberlist\") pod \"speaker-cffk7\" (UID: \"f4d31217-a7d3-490f-8bba-c9d8ca4c47ca\") " pod="metallb-system/speaker-cffk7"
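Annotation: the nestedpendingoperations failures above show the per-volume retry policy in action: the first MountVolume.SetUp failure schedules a retry after 500ms (durationBeforeRetry 500ms), the next failure doubles the delay to 1s, and the mount finally succeeds at 12:39:35.624579 once the metallb-memberlist secret exists. A minimal sketch of that doubling backoff; the 500ms start and the doubling are taken from the log, while the cap is an assumed placeholder, not a value visible here:

package main

import (
	"fmt"
	"time"
)

// nextBackoff doubles the retry delay after each consecutive failure,
// starting at 500ms, matching the durationBeforeRetry values logged
// above (500ms, then 1s). maxDelay is an assumed cap; the real limit
// is not visible in this log.
func nextBackoff(prev, maxDelay time.Duration) time.Duration {
	if prev == 0 {
		return 500 * time.Millisecond
	}
	if next := 2 * prev; next < maxDelay {
		return next
	}
	return maxDelay
}

func main() {
	var d time.Duration
	for i := 0; i < 4; i++ {
		d = nextBackoff(d, 2*time.Minute)
		fmt.Println("retry in", d) // 500ms, 1s, 2s, 4s
	}
}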
Dec 05 12:39:35 crc kubenswrapper[4784]: I1205 12:39:35.784095 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-cffk7"
Dec 05 12:39:35 crc kubenswrapper[4784]: I1205 12:39:35.932005 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-jgfs9" event={"ID":"6bb837d7-0a54-4a2c-a943-70838b7b3d58","Type":"ContainerStarted","Data":"38bfd4f5960b20f225bfec3dab4e6689129f4f668cccd1f34283a1e04a6943eb"}
Dec 05 12:39:35 crc kubenswrapper[4784]: I1205 12:39:35.932072 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-jgfs9" event={"ID":"6bb837d7-0a54-4a2c-a943-70838b7b3d58","Type":"ContainerStarted","Data":"d1d2784fe719951c7f28acb2424207be65609ab944cb5ad1d031a9133c7c5eb9"}
Dec 05 12:39:35 crc kubenswrapper[4784]: I1205 12:39:35.932138 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-f8648f98b-jgfs9"
Dec 05 12:39:35 crc kubenswrapper[4784]: I1205 12:39:35.939005 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-cffk7" event={"ID":"f4d31217-a7d3-490f-8bba-c9d8ca4c47ca","Type":"ContainerStarted","Data":"712633d335932713514edb70784c59382c1c5a1c505dc815a6b6f270025f22f2"}
Dec 05 12:39:35 crc kubenswrapper[4784]: I1205 12:39:35.951666 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-f8648f98b-jgfs9" podStartSLOduration=2.951644252 podStartE2EDuration="2.951644252s" podCreationTimestamp="2025-12-05 12:39:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:39:35.94747188 +0000 UTC m=+855.367538705" watchObservedRunningTime="2025-12-05 12:39:35.951644252 +0000 UTC m=+855.371711067"
Dec 05 12:39:36 crc kubenswrapper[4784]: I1205 12:39:36.941440 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-5hpbw"]
Dec 05 12:39:36 crc kubenswrapper[4784]: I1205 12:39:36.944319 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5hpbw"
Dec 05 12:39:36 crc kubenswrapper[4784]: I1205 12:39:36.954104 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5hpbw"]
Dec 05 12:39:36 crc kubenswrapper[4784]: I1205 12:39:36.963291 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-cffk7" event={"ID":"f4d31217-a7d3-490f-8bba-c9d8ca4c47ca","Type":"ContainerStarted","Data":"476e5cf222defa8114f21a439bd2bc624c2239f472209d0da8515f0be1a82604"}
Dec 05 12:39:36 crc kubenswrapper[4784]: I1205 12:39:36.963333 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-cffk7" event={"ID":"f4d31217-a7d3-490f-8bba-c9d8ca4c47ca","Type":"ContainerStarted","Data":"5609e0efee62e6d3545dee1f382368531ba47fdb06123cd293b57786a5567369"}
Dec 05 12:39:36 crc kubenswrapper[4784]: I1205 12:39:36.963354 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-cffk7"
Dec 05 12:39:36 crc kubenswrapper[4784]: I1205 12:39:36.998227 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-cffk7" podStartSLOduration=3.99820668 podStartE2EDuration="3.99820668s" podCreationTimestamp="2025-12-05 12:39:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:39:36.997938842 +0000 UTC m=+856.418005677" watchObservedRunningTime="2025-12-05 12:39:36.99820668 +0000 UTC m=+856.418273495"
Dec 05 12:39:37 crc kubenswrapper[4784]: I1205 12:39:37.038973 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/739a3684-dc27-4300-a1e7-cf1b4e72d8a1-catalog-content\") pod \"community-operators-5hpbw\" (UID: \"739a3684-dc27-4300-a1e7-cf1b4e72d8a1\") " pod="openshift-marketplace/community-operators-5hpbw"
Dec 05 12:39:37 crc kubenswrapper[4784]: I1205 12:39:37.039051 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qjtxg\" (UniqueName: \"kubernetes.io/projected/739a3684-dc27-4300-a1e7-cf1b4e72d8a1-kube-api-access-qjtxg\") pod \"community-operators-5hpbw\" (UID: \"739a3684-dc27-4300-a1e7-cf1b4e72d8a1\") " pod="openshift-marketplace/community-operators-5hpbw"
Dec 05 12:39:37 crc kubenswrapper[4784]: I1205 12:39:37.039386 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/739a3684-dc27-4300-a1e7-cf1b4e72d8a1-utilities\") pod \"community-operators-5hpbw\" (UID: \"739a3684-dc27-4300-a1e7-cf1b4e72d8a1\") " pod="openshift-marketplace/community-operators-5hpbw"
Dec 05 12:39:37 crc kubenswrapper[4784]: I1205 12:39:37.140887 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/739a3684-dc27-4300-a1e7-cf1b4e72d8a1-catalog-content\") pod \"community-operators-5hpbw\" (UID: \"739a3684-dc27-4300-a1e7-cf1b4e72d8a1\") " pod="openshift-marketplace/community-operators-5hpbw"
Dec 05 12:39:37 crc kubenswrapper[4784]: I1205 12:39:37.140940 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qjtxg\" (UniqueName: \"kubernetes.io/projected/739a3684-dc27-4300-a1e7-cf1b4e72d8a1-kube-api-access-qjtxg\") pod \"community-operators-5hpbw\" (UID: \"739a3684-dc27-4300-a1e7-cf1b4e72d8a1\") " pod="openshift-marketplace/community-operators-5hpbw"
Dec 05 12:39:37 crc kubenswrapper[4784]: I1205 12:39:37.141054 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/739a3684-dc27-4300-a1e7-cf1b4e72d8a1-utilities\") pod \"community-operators-5hpbw\" (UID: \"739a3684-dc27-4300-a1e7-cf1b4e72d8a1\") " pod="openshift-marketplace/community-operators-5hpbw"
Dec 05 12:39:37 crc kubenswrapper[4784]: I1205 12:39:37.141568 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/739a3684-dc27-4300-a1e7-cf1b4e72d8a1-utilities\") pod \"community-operators-5hpbw\" (UID: \"739a3684-dc27-4300-a1e7-cf1b4e72d8a1\") " pod="openshift-marketplace/community-operators-5hpbw"
Dec 05 12:39:37 crc kubenswrapper[4784]: I1205 12:39:37.141836 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/739a3684-dc27-4300-a1e7-cf1b4e72d8a1-catalog-content\") pod \"community-operators-5hpbw\" (UID: \"739a3684-dc27-4300-a1e7-cf1b4e72d8a1\") " pod="openshift-marketplace/community-operators-5hpbw"
Dec 05 12:39:37 crc kubenswrapper[4784]: I1205 12:39:37.161256 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qjtxg\" (UniqueName: \"kubernetes.io/projected/739a3684-dc27-4300-a1e7-cf1b4e72d8a1-kube-api-access-qjtxg\") pod \"community-operators-5hpbw\" (UID: \"739a3684-dc27-4300-a1e7-cf1b4e72d8a1\") " pod="openshift-marketplace/community-operators-5hpbw"
Dec 05 12:39:37 crc kubenswrapper[4784]: I1205 12:39:37.261426 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5hpbw"
Dec 05 12:39:37 crc kubenswrapper[4784]: I1205 12:39:37.794998 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5hpbw"]
Dec 05 12:39:37 crc kubenswrapper[4784]: W1205 12:39:37.823417 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod739a3684_dc27_4300_a1e7_cf1b4e72d8a1.slice/crio-e90e59c363c632286153b261b7088505cbf8799c7a9e55b2eecfefcfc038b52b WatchSource:0}: Error finding container e90e59c363c632286153b261b7088505cbf8799c7a9e55b2eecfefcfc038b52b: Status 404 returned error can't find the container with id e90e59c363c632286153b261b7088505cbf8799c7a9e55b2eecfefcfc038b52b
Dec 05 12:39:37 crc kubenswrapper[4784]: I1205 12:39:37.974754 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5hpbw" event={"ID":"739a3684-dc27-4300-a1e7-cf1b4e72d8a1","Type":"ContainerStarted","Data":"e90e59c363c632286153b261b7088505cbf8799c7a9e55b2eecfefcfc038b52b"}
Dec 05 12:39:38 crc kubenswrapper[4784]: I1205 12:39:38.983513 4784 generic.go:334] "Generic (PLEG): container finished" podID="739a3684-dc27-4300-a1e7-cf1b4e72d8a1" containerID="9441bf1c1d06308a5a63df9f1d386044be9635f15367b721cd7b891eb5626f3f" exitCode=0
Dec 05 12:39:38 crc kubenswrapper[4784]: I1205 12:39:38.983560 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5hpbw" event={"ID":"739a3684-dc27-4300-a1e7-cf1b4e72d8a1","Type":"ContainerDied","Data":"9441bf1c1d06308a5a63df9f1d386044be9635f15367b721cd7b891eb5626f3f"}
Dec 05 12:39:46 crc kubenswrapper[4784]: I1205 12:39:46.029443 4784 generic.go:334] "Generic (PLEG): container finished" podID="739a3684-dc27-4300-a1e7-cf1b4e72d8a1" containerID="829e97d75081ce0774f8e993fb6ed3eb10b54e54560eb9700a51fd926c5e55f9" exitCode=0
Dec 05 12:39:46 crc kubenswrapper[4784]: I1205 12:39:46.030129 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5hpbw" event={"ID":"739a3684-dc27-4300-a1e7-cf1b4e72d8a1","Type":"ContainerDied","Data":"829e97d75081ce0774f8e993fb6ed3eb10b54e54560eb9700a51fd926c5e55f9"}
Dec 05 12:39:46 crc kubenswrapper[4784]: I1205 12:39:46.032483 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-956ff" event={"ID":"a65c75b7-3183-4839-a70e-d16e4776e89d","Type":"ContainerStarted","Data":"dfdddb8433a07230fd911e4825133b16049cff54313cec6f0dab0a63188972c2"}
Dec 05 12:39:46 crc kubenswrapper[4784]: I1205 12:39:46.032611 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-956ff"
Dec 05 12:39:46 crc kubenswrapper[4784]: I1205 12:39:46.035066 4784 generic.go:334] "Generic (PLEG): container finished" podID="45594d33-f8da-48e8-b9c2-d60c96a98f64" containerID="d18b73f4f3d01cc6fa75ee5ae1d7989a74742abb8381687ecff895d01c6195fb" exitCode=0
Dec 05 12:39:46 crc kubenswrapper[4784]: I1205 12:39:46.035108 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xjbhc" event={"ID":"45594d33-f8da-48e8-b9c2-d60c96a98f64","Type":"ContainerDied","Data":"d18b73f4f3d01cc6fa75ee5ae1d7989a74742abb8381687ecff895d01c6195fb"}
Dec 05 12:39:46 crc kubenswrapper[4784]: I1205 12:39:46.089217 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-956ff" podStartSLOduration=1.965990997 podStartE2EDuration="13.089200509s" podCreationTimestamp="2025-12-05 12:39:33 +0000 UTC" firstStartedPulling="2025-12-05 12:39:34.333817175 +0000 UTC m=+853.753884000" lastFinishedPulling="2025-12-05 12:39:45.457026697 +0000 UTC m=+864.877093512" observedRunningTime="2025-12-05 12:39:46.088485336 +0000 UTC m=+865.508552161" watchObservedRunningTime="2025-12-05 12:39:46.089200509 +0000 UTC m=+865.509267324"
Dec 05 12:39:47 crc kubenswrapper[4784]: I1205 12:39:47.043422 4784 generic.go:334] "Generic (PLEG): container finished" podID="45594d33-f8da-48e8-b9c2-d60c96a98f64" containerID="93c3cee8fb34874f87cd3fe04c584d241d82d85d35f3bce664509b04f6eb54b5" exitCode=0
Dec 05 12:39:47 crc kubenswrapper[4784]: I1205 12:39:47.043538 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xjbhc" event={"ID":"45594d33-f8da-48e8-b9c2-d60c96a98f64","Type":"ContainerDied","Data":"93c3cee8fb34874f87cd3fe04c584d241d82d85d35f3bce664509b04f6eb54b5"}
Dec 05 12:39:47 crc kubenswrapper[4784]: I1205 12:39:47.056667 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5hpbw" event={"ID":"739a3684-dc27-4300-a1e7-cf1b4e72d8a1","Type":"ContainerStarted","Data":"8d7ed738e7cd220a89edf0723fbf84b9ddf1d7cbe4339225a4bbf36d82178d26"}
Dec 05 12:39:47 crc kubenswrapper[4784]: I1205 12:39:47.106807 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-5hpbw" podStartSLOduration=3.6662296210000003 podStartE2EDuration="11.106776146s" podCreationTimestamp="2025-12-05 12:39:36 +0000 UTC" firstStartedPulling="2025-12-05 12:39:38.985909405 +0000 UTC m=+858.405976220" lastFinishedPulling="2025-12-05 12:39:46.42645593 +0000 UTC m=+865.846522745" observedRunningTime="2025-12-05 12:39:47.09832134 +0000 UTC m=+866.518388175" watchObservedRunningTime="2025-12-05 12:39:47.106776146 +0000 UTC m=+866.526842981"
Dec 05 12:39:47 crc kubenswrapper[4784]: I1205 12:39:47.262810 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-5hpbw"
Dec 05 12:39:47 crc kubenswrapper[4784]: I1205 12:39:47.262910 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-5hpbw"
Dec 05 12:39:48 crc kubenswrapper[4784]: I1205 12:39:48.310158 4784 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-5hpbw" podUID="739a3684-dc27-4300-a1e7-cf1b4e72d8a1" containerName="registry-server" probeResult="failure" output=<
Dec 05 12:39:48 crc kubenswrapper[4784]: timeout: failed to connect service ":50051" within 1s
Dec 05 12:39:48 crc kubenswrapper[4784]: >
Dec 05 12:39:49 crc kubenswrapper[4784]: I1205 12:39:49.071081 4784 generic.go:334] "Generic (PLEG): container finished" podID="45594d33-f8da-48e8-b9c2-d60c96a98f64" containerID="c92609da7f4232fb0693b3b27f4735c8eb5fa2843ea07100ce68aef46f71394f" exitCode=0
Dec 05 12:39:49 crc kubenswrapper[4784]: I1205 12:39:49.071236 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xjbhc" event={"ID":"45594d33-f8da-48e8-b9c2-d60c96a98f64","Type":"ContainerDied","Data":"c92609da7f4232fb0693b3b27f4735c8eb5fa2843ea07100ce68aef46f71394f"}
Dec 05 12:39:50 crc kubenswrapper[4784]: I1205 12:39:50.080469 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xjbhc" event={"ID":"45594d33-f8da-48e8-b9c2-d60c96a98f64","Type":"ContainerStarted","Data":"b4845639e3d47b2c1403cab83a39452eecfcaca703dca81e6b3f3a84bc9882e6"}
Dec 05 12:39:50 crc kubenswrapper[4784]: I1205 12:39:50.080905 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xjbhc" event={"ID":"45594d33-f8da-48e8-b9c2-d60c96a98f64","Type":"ContainerStarted","Data":"b4b8d38e5604c5b4111ae08faa355f8dd32c54b653ea26567c176789579d02ce"}
Dec 05 12:39:50 crc kubenswrapper[4784]: I1205 12:39:50.080917 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xjbhc" event={"ID":"45594d33-f8da-48e8-b9c2-d60c96a98f64","Type":"ContainerStarted","Data":"f47dd7ab4bcc21b41e661b08c82697392fb83951801580456f7851127b81a69e"}
Dec 05 12:39:50 crc kubenswrapper[4784]: I1205 12:39:50.080927 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xjbhc" event={"ID":"45594d33-f8da-48e8-b9c2-d60c96a98f64","Type":"ContainerStarted","Data":"5801da36afa59480bcf8761d7c7cb75df866eda0e991e9ccfb3213bfffd0a943"}
Dec 05 12:39:50 crc kubenswrapper[4784]: I1205 12:39:50.846587 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-dhkpm"]
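Annotation: the startup-probe failure above (timeout: failed to connect service ":50051" within 1s) is the output of a gRPC health probe against the registry-server endpoint on port 50051. A rough Go equivalent of such a check using the standard gRPC health-checking protocol; the pod's actual probe command is not visible in this log, so treat this purely as an illustration:

package main

import (
	"context"
	"fmt"
	"time"

	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"
	healthpb "google.golang.org/grpc/health/grpc_health_v1"
)

func main() {
	// One-second budget, matching the "within 1s" in the probe output.
	ctx, cancel := context.WithTimeout(context.Background(), time.Second)
	defer cancel()

	conn, err := grpc.DialContext(ctx, ":50051",
		grpc.WithTransportCredentials(insecure.NewCredentials()),
		grpc.WithBlock())
	if err != nil {
		fmt.Println(`timeout: failed to connect service ":50051" within 1s`)
		return
	}
	defer conn.Close()

	resp, err := healthpb.NewHealthClient(conn).Check(ctx, &healthpb.HealthCheckRequest{})
	if err != nil {
		fmt.Println("health check failed:", err)
		return
	}
	fmt.Println("status:", resp.GetStatus())
}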
Need to start a new one" pod="openshift-marketplace/certified-operators-dhkpm" Dec 05 12:39:50 crc kubenswrapper[4784]: I1205 12:39:50.859170 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dhkpm"] Dec 05 12:39:50 crc kubenswrapper[4784]: I1205 12:39:50.965130 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/948b1977-b295-4eed-9be0-3889587aaa5b-catalog-content\") pod \"certified-operators-dhkpm\" (UID: \"948b1977-b295-4eed-9be0-3889587aaa5b\") " pod="openshift-marketplace/certified-operators-dhkpm" Dec 05 12:39:50 crc kubenswrapper[4784]: I1205 12:39:50.965215 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lvjnf\" (UniqueName: \"kubernetes.io/projected/948b1977-b295-4eed-9be0-3889587aaa5b-kube-api-access-lvjnf\") pod \"certified-operators-dhkpm\" (UID: \"948b1977-b295-4eed-9be0-3889587aaa5b\") " pod="openshift-marketplace/certified-operators-dhkpm" Dec 05 12:39:50 crc kubenswrapper[4784]: I1205 12:39:50.965251 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/948b1977-b295-4eed-9be0-3889587aaa5b-utilities\") pod \"certified-operators-dhkpm\" (UID: \"948b1977-b295-4eed-9be0-3889587aaa5b\") " pod="openshift-marketplace/certified-operators-dhkpm" Dec 05 12:39:51 crc kubenswrapper[4784]: I1205 12:39:51.066555 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/948b1977-b295-4eed-9be0-3889587aaa5b-catalog-content\") pod \"certified-operators-dhkpm\" (UID: \"948b1977-b295-4eed-9be0-3889587aaa5b\") " pod="openshift-marketplace/certified-operators-dhkpm" Dec 05 12:39:51 crc kubenswrapper[4784]: I1205 12:39:51.066626 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lvjnf\" (UniqueName: \"kubernetes.io/projected/948b1977-b295-4eed-9be0-3889587aaa5b-kube-api-access-lvjnf\") pod \"certified-operators-dhkpm\" (UID: \"948b1977-b295-4eed-9be0-3889587aaa5b\") " pod="openshift-marketplace/certified-operators-dhkpm" Dec 05 12:39:51 crc kubenswrapper[4784]: I1205 12:39:51.066665 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/948b1977-b295-4eed-9be0-3889587aaa5b-utilities\") pod \"certified-operators-dhkpm\" (UID: \"948b1977-b295-4eed-9be0-3889587aaa5b\") " pod="openshift-marketplace/certified-operators-dhkpm" Dec 05 12:39:51 crc kubenswrapper[4784]: I1205 12:39:51.067116 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/948b1977-b295-4eed-9be0-3889587aaa5b-utilities\") pod \"certified-operators-dhkpm\" (UID: \"948b1977-b295-4eed-9be0-3889587aaa5b\") " pod="openshift-marketplace/certified-operators-dhkpm" Dec 05 12:39:51 crc kubenswrapper[4784]: I1205 12:39:51.067356 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/948b1977-b295-4eed-9be0-3889587aaa5b-catalog-content\") pod \"certified-operators-dhkpm\" (UID: \"948b1977-b295-4eed-9be0-3889587aaa5b\") " pod="openshift-marketplace/certified-operators-dhkpm" Dec 05 12:39:51 crc kubenswrapper[4784]: I1205 12:39:51.090837 4784 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-lvjnf\" (UniqueName: \"kubernetes.io/projected/948b1977-b295-4eed-9be0-3889587aaa5b-kube-api-access-lvjnf\") pod \"certified-operators-dhkpm\" (UID: \"948b1977-b295-4eed-9be0-3889587aaa5b\") " pod="openshift-marketplace/certified-operators-dhkpm" Dec 05 12:39:51 crc kubenswrapper[4784]: I1205 12:39:51.093475 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xjbhc" event={"ID":"45594d33-f8da-48e8-b9c2-d60c96a98f64","Type":"ContainerStarted","Data":"b8602f49ccb496abe7a4aced434de2f6ce85b7a7577001d14541281ed2479b36"} Dec 05 12:39:51 crc kubenswrapper[4784]: I1205 12:39:51.093509 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xjbhc" event={"ID":"45594d33-f8da-48e8-b9c2-d60c96a98f64","Type":"ContainerStarted","Data":"301881238dbe5159fed1f245a9d3db0130ad4f94bf3ebab45b801496545ffd54"} Dec 05 12:39:51 crc kubenswrapper[4784]: I1205 12:39:51.094434 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-xjbhc" Dec 05 12:39:51 crc kubenswrapper[4784]: I1205 12:39:51.164290 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dhkpm" Dec 05 12:39:51 crc kubenswrapper[4784]: I1205 12:39:51.623874 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-xjbhc" podStartSLOduration=7.476935386 podStartE2EDuration="18.623851942s" podCreationTimestamp="2025-12-05 12:39:33 +0000 UTC" firstStartedPulling="2025-12-05 12:39:34.282890734 +0000 UTC m=+853.702957559" lastFinishedPulling="2025-12-05 12:39:45.4298073 +0000 UTC m=+864.849874115" observedRunningTime="2025-12-05 12:39:51.121532541 +0000 UTC m=+870.541599386" watchObservedRunningTime="2025-12-05 12:39:51.623851942 +0000 UTC m=+871.043918757" Dec 05 12:39:51 crc kubenswrapper[4784]: I1205 12:39:51.628658 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dhkpm"] Dec 05 12:39:52 crc kubenswrapper[4784]: I1205 12:39:52.038875 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-gcnc8"] Dec 05 12:39:52 crc kubenswrapper[4784]: I1205 12:39:52.041045 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gcnc8" Dec 05 12:39:52 crc kubenswrapper[4784]: I1205 12:39:52.060416 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gcnc8"] Dec 05 12:39:52 crc kubenswrapper[4784]: I1205 12:39:52.106400 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dhkpm" event={"ID":"948b1977-b295-4eed-9be0-3889587aaa5b","Type":"ContainerStarted","Data":"26ee633c9053e739f178eaeb9a575f4e35972d87ae4b740b90e7efddf8fa5ca2"} Dec 05 12:39:52 crc kubenswrapper[4784]: I1205 12:39:52.192178 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d964987c-b60c-4b82-a6c8-9b1a6de068fe-utilities\") pod \"redhat-marketplace-gcnc8\" (UID: \"d964987c-b60c-4b82-a6c8-9b1a6de068fe\") " pod="openshift-marketplace/redhat-marketplace-gcnc8" Dec 05 12:39:52 crc kubenswrapper[4784]: I1205 12:39:52.192492 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cltvk\" (UniqueName: \"kubernetes.io/projected/d964987c-b60c-4b82-a6c8-9b1a6de068fe-kube-api-access-cltvk\") pod \"redhat-marketplace-gcnc8\" (UID: \"d964987c-b60c-4b82-a6c8-9b1a6de068fe\") " pod="openshift-marketplace/redhat-marketplace-gcnc8" Dec 05 12:39:52 crc kubenswrapper[4784]: I1205 12:39:52.192594 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d964987c-b60c-4b82-a6c8-9b1a6de068fe-catalog-content\") pod \"redhat-marketplace-gcnc8\" (UID: \"d964987c-b60c-4b82-a6c8-9b1a6de068fe\") " pod="openshift-marketplace/redhat-marketplace-gcnc8" Dec 05 12:39:52 crc kubenswrapper[4784]: I1205 12:39:52.294326 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d964987c-b60c-4b82-a6c8-9b1a6de068fe-utilities\") pod \"redhat-marketplace-gcnc8\" (UID: \"d964987c-b60c-4b82-a6c8-9b1a6de068fe\") " pod="openshift-marketplace/redhat-marketplace-gcnc8" Dec 05 12:39:52 crc kubenswrapper[4784]: I1205 12:39:52.294637 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cltvk\" (UniqueName: \"kubernetes.io/projected/d964987c-b60c-4b82-a6c8-9b1a6de068fe-kube-api-access-cltvk\") pod \"redhat-marketplace-gcnc8\" (UID: \"d964987c-b60c-4b82-a6c8-9b1a6de068fe\") " pod="openshift-marketplace/redhat-marketplace-gcnc8" Dec 05 12:39:52 crc kubenswrapper[4784]: I1205 12:39:52.294657 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d964987c-b60c-4b82-a6c8-9b1a6de068fe-catalog-content\") pod \"redhat-marketplace-gcnc8\" (UID: \"d964987c-b60c-4b82-a6c8-9b1a6de068fe\") " pod="openshift-marketplace/redhat-marketplace-gcnc8" Dec 05 12:39:52 crc kubenswrapper[4784]: I1205 12:39:52.295369 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d964987c-b60c-4b82-a6c8-9b1a6de068fe-utilities\") pod \"redhat-marketplace-gcnc8\" (UID: \"d964987c-b60c-4b82-a6c8-9b1a6de068fe\") " pod="openshift-marketplace/redhat-marketplace-gcnc8" Dec 05 12:39:52 crc kubenswrapper[4784]: I1205 12:39:52.295565 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/d964987c-b60c-4b82-a6c8-9b1a6de068fe-catalog-content\") pod \"redhat-marketplace-gcnc8\" (UID: \"d964987c-b60c-4b82-a6c8-9b1a6de068fe\") " pod="openshift-marketplace/redhat-marketplace-gcnc8" Dec 05 12:39:52 crc kubenswrapper[4784]: I1205 12:39:52.315420 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cltvk\" (UniqueName: \"kubernetes.io/projected/d964987c-b60c-4b82-a6c8-9b1a6de068fe-kube-api-access-cltvk\") pod \"redhat-marketplace-gcnc8\" (UID: \"d964987c-b60c-4b82-a6c8-9b1a6de068fe\") " pod="openshift-marketplace/redhat-marketplace-gcnc8" Dec 05 12:39:52 crc kubenswrapper[4784]: I1205 12:39:52.368173 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gcnc8" Dec 05 12:39:52 crc kubenswrapper[4784]: I1205 12:39:52.610288 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gcnc8"] Dec 05 12:39:52 crc kubenswrapper[4784]: W1205 12:39:52.614776 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd964987c_b60c_4b82_a6c8_9b1a6de068fe.slice/crio-0a993623072ff8b329861dea1a3c8b31e82795d778dd8863b54b5a4fe29fed48 WatchSource:0}: Error finding container 0a993623072ff8b329861dea1a3c8b31e82795d778dd8863b54b5a4fe29fed48: Status 404 returned error can't find the container with id 0a993623072ff8b329861dea1a3c8b31e82795d778dd8863b54b5a4fe29fed48 Dec 05 12:39:53 crc kubenswrapper[4784]: I1205 12:39:53.117912 4784 generic.go:334] "Generic (PLEG): container finished" podID="d964987c-b60c-4b82-a6c8-9b1a6de068fe" containerID="11c866510219ee114262d71e8d1664f50ff766cd15061cccb774575db82e5fb2" exitCode=0 Dec 05 12:39:53 crc kubenswrapper[4784]: I1205 12:39:53.118021 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gcnc8" event={"ID":"d964987c-b60c-4b82-a6c8-9b1a6de068fe","Type":"ContainerDied","Data":"11c866510219ee114262d71e8d1664f50ff766cd15061cccb774575db82e5fb2"} Dec 05 12:39:53 crc kubenswrapper[4784]: I1205 12:39:53.118095 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gcnc8" event={"ID":"d964987c-b60c-4b82-a6c8-9b1a6de068fe","Type":"ContainerStarted","Data":"0a993623072ff8b329861dea1a3c8b31e82795d778dd8863b54b5a4fe29fed48"} Dec 05 12:39:53 crc kubenswrapper[4784]: I1205 12:39:53.122572 4784 generic.go:334] "Generic (PLEG): container finished" podID="948b1977-b295-4eed-9be0-3889587aaa5b" containerID="adf4b5573e24bc1dc065080e177d137cad1151a2fddf2cdad258bc73d39b0c6e" exitCode=0 Dec 05 12:39:53 crc kubenswrapper[4784]: I1205 12:39:53.122821 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dhkpm" event={"ID":"948b1977-b295-4eed-9be0-3889587aaa5b","Type":"ContainerDied","Data":"adf4b5573e24bc1dc065080e177d137cad1151a2fddf2cdad258bc73d39b0c6e"} Dec 05 12:39:54 crc kubenswrapper[4784]: I1205 12:39:54.089555 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-xjbhc" Dec 05 12:39:54 crc kubenswrapper[4784]: I1205 12:39:54.129908 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-xjbhc" Dec 05 12:39:54 crc kubenswrapper[4784]: I1205 12:39:54.656593 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-f8648f98b-jgfs9" Dec 05 12:39:55 crc 
kubenswrapper[4784]: I1205 12:39:55.789672 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-cffk7" Dec 05 12:39:57 crc kubenswrapper[4784]: I1205 12:39:57.164607 4784 generic.go:334] "Generic (PLEG): container finished" podID="d964987c-b60c-4b82-a6c8-9b1a6de068fe" containerID="871e6ebfbcf11153a589cbe5b750c37b77ee9a0c00967f9253d719d168b72d0a" exitCode=0 Dec 05 12:39:57 crc kubenswrapper[4784]: I1205 12:39:57.165026 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gcnc8" event={"ID":"d964987c-b60c-4b82-a6c8-9b1a6de068fe","Type":"ContainerDied","Data":"871e6ebfbcf11153a589cbe5b750c37b77ee9a0c00967f9253d719d168b72d0a"} Dec 05 12:39:57 crc kubenswrapper[4784]: I1205 12:39:57.170020 4784 generic.go:334] "Generic (PLEG): container finished" podID="948b1977-b295-4eed-9be0-3889587aaa5b" containerID="7925d8475d90375d35017a47dfa2b22c6ba668b0447e2b2863362e16e2f0a550" exitCode=0 Dec 05 12:39:57 crc kubenswrapper[4784]: I1205 12:39:57.170087 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dhkpm" event={"ID":"948b1977-b295-4eed-9be0-3889587aaa5b","Type":"ContainerDied","Data":"7925d8475d90375d35017a47dfa2b22c6ba668b0447e2b2863362e16e2f0a550"} Dec 05 12:39:57 crc kubenswrapper[4784]: I1205 12:39:57.307623 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-5hpbw" Dec 05 12:39:57 crc kubenswrapper[4784]: I1205 12:39:57.353074 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-5hpbw" Dec 05 12:40:00 crc kubenswrapper[4784]: I1205 12:40:00.195012 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gcnc8" event={"ID":"d964987c-b60c-4b82-a6c8-9b1a6de068fe","Type":"ContainerStarted","Data":"89d09d0be6335c131fd1b40504f859f083713bce4968c1ac2400a76633521c09"} Dec 05 12:40:00 crc kubenswrapper[4784]: I1205 12:40:00.215287 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-gcnc8" podStartSLOduration=1.9218091579999999 podStartE2EDuration="8.215270034s" podCreationTimestamp="2025-12-05 12:39:52 +0000 UTC" firstStartedPulling="2025-12-05 12:39:53.119706554 +0000 UTC m=+872.539773369" lastFinishedPulling="2025-12-05 12:39:59.41316743 +0000 UTC m=+878.833234245" observedRunningTime="2025-12-05 12:40:00.209501633 +0000 UTC m=+879.629568448" watchObservedRunningTime="2025-12-05 12:40:00.215270034 +0000 UTC m=+879.635336849" Dec 05 12:40:00 crc kubenswrapper[4784]: I1205 12:40:00.220309 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5hpbw"] Dec 05 12:40:00 crc kubenswrapper[4784]: I1205 12:40:00.220629 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-5hpbw" podUID="739a3684-dc27-4300-a1e7-cf1b4e72d8a1" containerName="registry-server" containerID="cri-o://8d7ed738e7cd220a89edf0723fbf84b9ddf1d7cbe4339225a4bbf36d82178d26" gracePeriod=2 Dec 05 12:40:00 crc kubenswrapper[4784]: I1205 12:40:00.661236 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-5hpbw" Dec 05 12:40:00 crc kubenswrapper[4784]: I1205 12:40:00.826088 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/739a3684-dc27-4300-a1e7-cf1b4e72d8a1-catalog-content\") pod \"739a3684-dc27-4300-a1e7-cf1b4e72d8a1\" (UID: \"739a3684-dc27-4300-a1e7-cf1b4e72d8a1\") " Dec 05 12:40:00 crc kubenswrapper[4784]: I1205 12:40:00.826245 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/739a3684-dc27-4300-a1e7-cf1b4e72d8a1-utilities\") pod \"739a3684-dc27-4300-a1e7-cf1b4e72d8a1\" (UID: \"739a3684-dc27-4300-a1e7-cf1b4e72d8a1\") " Dec 05 12:40:00 crc kubenswrapper[4784]: I1205 12:40:00.826302 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qjtxg\" (UniqueName: \"kubernetes.io/projected/739a3684-dc27-4300-a1e7-cf1b4e72d8a1-kube-api-access-qjtxg\") pod \"739a3684-dc27-4300-a1e7-cf1b4e72d8a1\" (UID: \"739a3684-dc27-4300-a1e7-cf1b4e72d8a1\") " Dec 05 12:40:00 crc kubenswrapper[4784]: I1205 12:40:00.827014 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/739a3684-dc27-4300-a1e7-cf1b4e72d8a1-utilities" (OuterVolumeSpecName: "utilities") pod "739a3684-dc27-4300-a1e7-cf1b4e72d8a1" (UID: "739a3684-dc27-4300-a1e7-cf1b4e72d8a1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:40:00 crc kubenswrapper[4784]: I1205 12:40:00.840476 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/739a3684-dc27-4300-a1e7-cf1b4e72d8a1-kube-api-access-qjtxg" (OuterVolumeSpecName: "kube-api-access-qjtxg") pod "739a3684-dc27-4300-a1e7-cf1b4e72d8a1" (UID: "739a3684-dc27-4300-a1e7-cf1b4e72d8a1"). InnerVolumeSpecName "kube-api-access-qjtxg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:40:00 crc kubenswrapper[4784]: I1205 12:40:00.876349 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/739a3684-dc27-4300-a1e7-cf1b4e72d8a1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "739a3684-dc27-4300-a1e7-cf1b4e72d8a1" (UID: "739a3684-dc27-4300-a1e7-cf1b4e72d8a1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:40:00 crc kubenswrapper[4784]: I1205 12:40:00.927696 4784 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/739a3684-dc27-4300-a1e7-cf1b4e72d8a1-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:40:00 crc kubenswrapper[4784]: I1205 12:40:00.927733 4784 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/739a3684-dc27-4300-a1e7-cf1b4e72d8a1-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:40:00 crc kubenswrapper[4784]: I1205 12:40:00.927743 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qjtxg\" (UniqueName: \"kubernetes.io/projected/739a3684-dc27-4300-a1e7-cf1b4e72d8a1-kube-api-access-qjtxg\") on node \"crc\" DevicePath \"\"" Dec 05 12:40:01 crc kubenswrapper[4784]: I1205 12:40:01.202808 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dhkpm" event={"ID":"948b1977-b295-4eed-9be0-3889587aaa5b","Type":"ContainerStarted","Data":"01f72d36c4c937c0d367ab3fe8cb9f0e1937d4e62206d56e3a8970cf34710aa6"} Dec 05 12:40:01 crc kubenswrapper[4784]: I1205 12:40:01.210804 4784 generic.go:334] "Generic (PLEG): container finished" podID="739a3684-dc27-4300-a1e7-cf1b4e72d8a1" containerID="8d7ed738e7cd220a89edf0723fbf84b9ddf1d7cbe4339225a4bbf36d82178d26" exitCode=0 Dec 05 12:40:01 crc kubenswrapper[4784]: I1205 12:40:01.212038 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5hpbw" event={"ID":"739a3684-dc27-4300-a1e7-cf1b4e72d8a1","Type":"ContainerDied","Data":"8d7ed738e7cd220a89edf0723fbf84b9ddf1d7cbe4339225a4bbf36d82178d26"} Dec 05 12:40:01 crc kubenswrapper[4784]: I1205 12:40:01.212145 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5hpbw" event={"ID":"739a3684-dc27-4300-a1e7-cf1b4e72d8a1","Type":"ContainerDied","Data":"e90e59c363c632286153b261b7088505cbf8799c7a9e55b2eecfefcfc038b52b"} Dec 05 12:40:01 crc kubenswrapper[4784]: I1205 12:40:01.212170 4784 scope.go:117] "RemoveContainer" containerID="8d7ed738e7cd220a89edf0723fbf84b9ddf1d7cbe4339225a4bbf36d82178d26" Dec 05 12:40:01 crc kubenswrapper[4784]: I1205 12:40:01.214672 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-5hpbw" Dec 05 12:40:01 crc kubenswrapper[4784]: I1205 12:40:01.240680 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-dhkpm" podStartSLOduration=4.301506116 podStartE2EDuration="11.240657018s" podCreationTimestamp="2025-12-05 12:39:50 +0000 UTC" firstStartedPulling="2025-12-05 12:39:53.124371311 +0000 UTC m=+872.544438156" lastFinishedPulling="2025-12-05 12:40:00.063522233 +0000 UTC m=+879.483589058" observedRunningTime="2025-12-05 12:40:01.228816295 +0000 UTC m=+880.648883120" watchObservedRunningTime="2025-12-05 12:40:01.240657018 +0000 UTC m=+880.660723833" Dec 05 12:40:01 crc kubenswrapper[4784]: I1205 12:40:01.242541 4784 scope.go:117] "RemoveContainer" containerID="829e97d75081ce0774f8e993fb6ed3eb10b54e54560eb9700a51fd926c5e55f9" Dec 05 12:40:01 crc kubenswrapper[4784]: I1205 12:40:01.251911 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5hpbw"] Dec 05 12:40:01 crc kubenswrapper[4784]: I1205 12:40:01.256388 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-5hpbw"] Dec 05 12:40:01 crc kubenswrapper[4784]: I1205 12:40:01.264672 4784 scope.go:117] "RemoveContainer" containerID="9441bf1c1d06308a5a63df9f1d386044be9635f15367b721cd7b891eb5626f3f" Dec 05 12:40:01 crc kubenswrapper[4784]: I1205 12:40:01.287587 4784 scope.go:117] "RemoveContainer" containerID="8d7ed738e7cd220a89edf0723fbf84b9ddf1d7cbe4339225a4bbf36d82178d26" Dec 05 12:40:01 crc kubenswrapper[4784]: E1205 12:40:01.288405 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d7ed738e7cd220a89edf0723fbf84b9ddf1d7cbe4339225a4bbf36d82178d26\": container with ID starting with 8d7ed738e7cd220a89edf0723fbf84b9ddf1d7cbe4339225a4bbf36d82178d26 not found: ID does not exist" containerID="8d7ed738e7cd220a89edf0723fbf84b9ddf1d7cbe4339225a4bbf36d82178d26" Dec 05 12:40:01 crc kubenswrapper[4784]: I1205 12:40:01.288465 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d7ed738e7cd220a89edf0723fbf84b9ddf1d7cbe4339225a4bbf36d82178d26"} err="failed to get container status \"8d7ed738e7cd220a89edf0723fbf84b9ddf1d7cbe4339225a4bbf36d82178d26\": rpc error: code = NotFound desc = could not find container \"8d7ed738e7cd220a89edf0723fbf84b9ddf1d7cbe4339225a4bbf36d82178d26\": container with ID starting with 8d7ed738e7cd220a89edf0723fbf84b9ddf1d7cbe4339225a4bbf36d82178d26 not found: ID does not exist" Dec 05 12:40:01 crc kubenswrapper[4784]: I1205 12:40:01.288519 4784 scope.go:117] "RemoveContainer" containerID="829e97d75081ce0774f8e993fb6ed3eb10b54e54560eb9700a51fd926c5e55f9" Dec 05 12:40:01 crc kubenswrapper[4784]: E1205 12:40:01.290666 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"829e97d75081ce0774f8e993fb6ed3eb10b54e54560eb9700a51fd926c5e55f9\": container with ID starting with 829e97d75081ce0774f8e993fb6ed3eb10b54e54560eb9700a51fd926c5e55f9 not found: ID does not exist" containerID="829e97d75081ce0774f8e993fb6ed3eb10b54e54560eb9700a51fd926c5e55f9" Dec 05 12:40:01 crc kubenswrapper[4784]: I1205 12:40:01.290697 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"829e97d75081ce0774f8e993fb6ed3eb10b54e54560eb9700a51fd926c5e55f9"} err="failed to get 
container status \"829e97d75081ce0774f8e993fb6ed3eb10b54e54560eb9700a51fd926c5e55f9\": rpc error: code = NotFound desc = could not find container \"829e97d75081ce0774f8e993fb6ed3eb10b54e54560eb9700a51fd926c5e55f9\": container with ID starting with 829e97d75081ce0774f8e993fb6ed3eb10b54e54560eb9700a51fd926c5e55f9 not found: ID does not exist" Dec 05 12:40:01 crc kubenswrapper[4784]: I1205 12:40:01.290718 4784 scope.go:117] "RemoveContainer" containerID="9441bf1c1d06308a5a63df9f1d386044be9635f15367b721cd7b891eb5626f3f" Dec 05 12:40:01 crc kubenswrapper[4784]: E1205 12:40:01.291039 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9441bf1c1d06308a5a63df9f1d386044be9635f15367b721cd7b891eb5626f3f\": container with ID starting with 9441bf1c1d06308a5a63df9f1d386044be9635f15367b721cd7b891eb5626f3f not found: ID does not exist" containerID="9441bf1c1d06308a5a63df9f1d386044be9635f15367b721cd7b891eb5626f3f" Dec 05 12:40:01 crc kubenswrapper[4784]: I1205 12:40:01.291060 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9441bf1c1d06308a5a63df9f1d386044be9635f15367b721cd7b891eb5626f3f"} err="failed to get container status \"9441bf1c1d06308a5a63df9f1d386044be9635f15367b721cd7b891eb5626f3f\": rpc error: code = NotFound desc = could not find container \"9441bf1c1d06308a5a63df9f1d386044be9635f15367b721cd7b891eb5626f3f\": container with ID starting with 9441bf1c1d06308a5a63df9f1d386044be9635f15367b721cd7b891eb5626f3f not found: ID does not exist" Dec 05 12:40:02 crc kubenswrapper[4784]: I1205 12:40:02.369013 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-gcnc8" Dec 05 12:40:02 crc kubenswrapper[4784]: I1205 12:40:02.369074 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-gcnc8" Dec 05 12:40:02 crc kubenswrapper[4784]: I1205 12:40:02.412511 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-gcnc8" Dec 05 12:40:03 crc kubenswrapper[4784]: I1205 12:40:03.006634 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="739a3684-dc27-4300-a1e7-cf1b4e72d8a1" path="/var/lib/kubelet/pods/739a3684-dc27-4300-a1e7-cf1b4e72d8a1/volumes" Dec 05 12:40:03 crc kubenswrapper[4784]: I1205 12:40:03.022975 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-mzzgx"] Dec 05 12:40:03 crc kubenswrapper[4784]: E1205 12:40:03.023262 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="739a3684-dc27-4300-a1e7-cf1b4e72d8a1" containerName="extract-content" Dec 05 12:40:03 crc kubenswrapper[4784]: I1205 12:40:03.023278 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="739a3684-dc27-4300-a1e7-cf1b4e72d8a1" containerName="extract-content" Dec 05 12:40:03 crc kubenswrapper[4784]: E1205 12:40:03.023299 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="739a3684-dc27-4300-a1e7-cf1b4e72d8a1" containerName="registry-server" Dec 05 12:40:03 crc kubenswrapper[4784]: I1205 12:40:03.023307 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="739a3684-dc27-4300-a1e7-cf1b4e72d8a1" containerName="registry-server" Dec 05 12:40:03 crc kubenswrapper[4784]: E1205 12:40:03.023318 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="739a3684-dc27-4300-a1e7-cf1b4e72d8a1" 
containerName="extract-utilities" Dec 05 12:40:03 crc kubenswrapper[4784]: I1205 12:40:03.023325 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="739a3684-dc27-4300-a1e7-cf1b4e72d8a1" containerName="extract-utilities" Dec 05 12:40:03 crc kubenswrapper[4784]: I1205 12:40:03.023475 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="739a3684-dc27-4300-a1e7-cf1b4e72d8a1" containerName="registry-server" Dec 05 12:40:03 crc kubenswrapper[4784]: I1205 12:40:03.023972 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-mzzgx" Dec 05 12:40:03 crc kubenswrapper[4784]: I1205 12:40:03.025606 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 05 12:40:03 crc kubenswrapper[4784]: I1205 12:40:03.025931 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-kvht5" Dec 05 12:40:03 crc kubenswrapper[4784]: I1205 12:40:03.026648 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 05 12:40:03 crc kubenswrapper[4784]: I1205 12:40:03.034631 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-mzzgx"] Dec 05 12:40:03 crc kubenswrapper[4784]: I1205 12:40:03.059455 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-76vcb\" (UniqueName: \"kubernetes.io/projected/40db50d1-6180-45f5-9774-7ed6b6dbf490-kube-api-access-76vcb\") pod \"openstack-operator-index-mzzgx\" (UID: \"40db50d1-6180-45f5-9774-7ed6b6dbf490\") " pod="openstack-operators/openstack-operator-index-mzzgx" Dec 05 12:40:03 crc kubenswrapper[4784]: I1205 12:40:03.160721 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-76vcb\" (UniqueName: \"kubernetes.io/projected/40db50d1-6180-45f5-9774-7ed6b6dbf490-kube-api-access-76vcb\") pod \"openstack-operator-index-mzzgx\" (UID: \"40db50d1-6180-45f5-9774-7ed6b6dbf490\") " pod="openstack-operators/openstack-operator-index-mzzgx" Dec 05 12:40:03 crc kubenswrapper[4784]: I1205 12:40:03.182025 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-76vcb\" (UniqueName: \"kubernetes.io/projected/40db50d1-6180-45f5-9774-7ed6b6dbf490-kube-api-access-76vcb\") pod \"openstack-operator-index-mzzgx\" (UID: \"40db50d1-6180-45f5-9774-7ed6b6dbf490\") " pod="openstack-operators/openstack-operator-index-mzzgx" Dec 05 12:40:03 crc kubenswrapper[4784]: I1205 12:40:03.341664 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-mzzgx" Dec 05 12:40:03 crc kubenswrapper[4784]: I1205 12:40:03.764142 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-mzzgx"] Dec 05 12:40:03 crc kubenswrapper[4784]: W1205 12:40:03.766844 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod40db50d1_6180_45f5_9774_7ed6b6dbf490.slice/crio-253cac02275d8a131486c14700edbd753b8afda7cd0194eb56d504ea8e2518eb WatchSource:0}: Error finding container 253cac02275d8a131486c14700edbd753b8afda7cd0194eb56d504ea8e2518eb: Status 404 returned error can't find the container with id 253cac02275d8a131486c14700edbd753b8afda7cd0194eb56d504ea8e2518eb Dec 05 12:40:04 crc kubenswrapper[4784]: I1205 12:40:04.097467 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-xjbhc" Dec 05 12:40:07 crc kubenswrapper[4784]: I1205 12:40:04.109539 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-956ff" Dec 05 12:40:07 crc kubenswrapper[4784]: I1205 12:40:04.232468 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-mzzgx" event={"ID":"40db50d1-6180-45f5-9774-7ed6b6dbf490","Type":"ContainerStarted","Data":"253cac02275d8a131486c14700edbd753b8afda7cd0194eb56d504ea8e2518eb"} Dec 05 12:40:11 crc kubenswrapper[4784]: I1205 12:40:11.165086 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-dhkpm" Dec 05 12:40:11 crc kubenswrapper[4784]: I1205 12:40:11.165765 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-dhkpm" Dec 05 12:40:11 crc kubenswrapper[4784]: I1205 12:40:11.208555 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-dhkpm" Dec 05 12:40:11 crc kubenswrapper[4784]: I1205 12:40:11.343845 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-dhkpm" Dec 05 12:40:12 crc kubenswrapper[4784]: I1205 12:40:12.429928 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-gcnc8" Dec 05 12:40:15 crc kubenswrapper[4784]: I1205 12:40:15.426227 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gcnc8"] Dec 05 12:40:15 crc kubenswrapper[4784]: I1205 12:40:15.426981 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-gcnc8" podUID="d964987c-b60c-4b82-a6c8-9b1a6de068fe" containerName="registry-server" containerID="cri-o://89d09d0be6335c131fd1b40504f859f083713bce4968c1ac2400a76633521c09" gracePeriod=2 Dec 05 12:40:15 crc kubenswrapper[4784]: I1205 12:40:15.819700 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dhkpm"] Dec 05 12:40:15 crc kubenswrapper[4784]: I1205 12:40:15.820015 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-dhkpm" podUID="948b1977-b295-4eed-9be0-3889587aaa5b" containerName="registry-server" containerID="cri-o://01f72d36c4c937c0d367ab3fe8cb9f0e1937d4e62206d56e3a8970cf34710aa6" gracePeriod=2 Dec 05 12:40:16 crc 
Dec 05 12:40:16 crc kubenswrapper[4784]: I1205 12:40:16.313653 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gcnc8" event={"ID":"d964987c-b60c-4b82-a6c8-9b1a6de068fe","Type":"ContainerDied","Data":"89d09d0be6335c131fd1b40504f859f083713bce4968c1ac2400a76633521c09"}
Dec 05 12:40:16 crc kubenswrapper[4784]: I1205 12:40:16.316181 4784 generic.go:334] "Generic (PLEG): container finished" podID="948b1977-b295-4eed-9be0-3889587aaa5b" containerID="01f72d36c4c937c0d367ab3fe8cb9f0e1937d4e62206d56e3a8970cf34710aa6" exitCode=0
Dec 05 12:40:16 crc kubenswrapper[4784]: I1205 12:40:16.316213 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dhkpm" event={"ID":"948b1977-b295-4eed-9be0-3889587aaa5b","Type":"ContainerDied","Data":"01f72d36c4c937c0d367ab3fe8cb9f0e1937d4e62206d56e3a8970cf34710aa6"}
Dec 05 12:40:18 crc kubenswrapper[4784]: I1205 12:40:18.931302 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gcnc8"
Dec 05 12:40:19 crc kubenswrapper[4784]: I1205 12:40:19.092969 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d964987c-b60c-4b82-a6c8-9b1a6de068fe-utilities\") pod \"d964987c-b60c-4b82-a6c8-9b1a6de068fe\" (UID: \"d964987c-b60c-4b82-a6c8-9b1a6de068fe\") "
Dec 05 12:40:19 crc kubenswrapper[4784]: I1205 12:40:19.093347 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cltvk\" (UniqueName: \"kubernetes.io/projected/d964987c-b60c-4b82-a6c8-9b1a6de068fe-kube-api-access-cltvk\") pod \"d964987c-b60c-4b82-a6c8-9b1a6de068fe\" (UID: \"d964987c-b60c-4b82-a6c8-9b1a6de068fe\") "
Dec 05 12:40:19 crc kubenswrapper[4784]: I1205 12:40:19.093537 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d964987c-b60c-4b82-a6c8-9b1a6de068fe-catalog-content\") pod \"d964987c-b60c-4b82-a6c8-9b1a6de068fe\" (UID: \"d964987c-b60c-4b82-a6c8-9b1a6de068fe\") "
Dec 05 12:40:19 crc kubenswrapper[4784]: I1205 12:40:19.094030 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d964987c-b60c-4b82-a6c8-9b1a6de068fe-utilities" (OuterVolumeSpecName: "utilities") pod "d964987c-b60c-4b82-a6c8-9b1a6de068fe" (UID: "d964987c-b60c-4b82-a6c8-9b1a6de068fe"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 12:40:19 crc kubenswrapper[4784]: I1205 12:40:19.094361 4784 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d964987c-b60c-4b82-a6c8-9b1a6de068fe-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 12:40:19 crc kubenswrapper[4784]: I1205 12:40:19.102797 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d964987c-b60c-4b82-a6c8-9b1a6de068fe-kube-api-access-cltvk" (OuterVolumeSpecName: "kube-api-access-cltvk") pod "d964987c-b60c-4b82-a6c8-9b1a6de068fe" (UID: "d964987c-b60c-4b82-a6c8-9b1a6de068fe"). InnerVolumeSpecName "kube-api-access-cltvk". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:40:19 crc kubenswrapper[4784]: I1205 12:40:19.116850 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d964987c-b60c-4b82-a6c8-9b1a6de068fe-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d964987c-b60c-4b82-a6c8-9b1a6de068fe" (UID: "d964987c-b60c-4b82-a6c8-9b1a6de068fe"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:40:19 crc kubenswrapper[4784]: I1205 12:40:19.196015 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cltvk\" (UniqueName: \"kubernetes.io/projected/d964987c-b60c-4b82-a6c8-9b1a6de068fe-kube-api-access-cltvk\") on node \"crc\" DevicePath \"\"" Dec 05 12:40:19 crc kubenswrapper[4784]: I1205 12:40:19.196065 4784 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d964987c-b60c-4b82-a6c8-9b1a6de068fe-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:40:19 crc kubenswrapper[4784]: I1205 12:40:19.338792 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gcnc8" event={"ID":"d964987c-b60c-4b82-a6c8-9b1a6de068fe","Type":"ContainerDied","Data":"0a993623072ff8b329861dea1a3c8b31e82795d778dd8863b54b5a4fe29fed48"} Dec 05 12:40:19 crc kubenswrapper[4784]: I1205 12:40:19.338845 4784 scope.go:117] "RemoveContainer" containerID="89d09d0be6335c131fd1b40504f859f083713bce4968c1ac2400a76633521c09" Dec 05 12:40:19 crc kubenswrapper[4784]: I1205 12:40:19.338852 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gcnc8" Dec 05 12:40:19 crc kubenswrapper[4784]: I1205 12:40:19.369131 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gcnc8"] Dec 05 12:40:19 crc kubenswrapper[4784]: I1205 12:40:19.374279 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-gcnc8"] Dec 05 12:40:20 crc kubenswrapper[4784]: I1205 12:40:20.341800 4784 scope.go:117] "RemoveContainer" containerID="871e6ebfbcf11153a589cbe5b750c37b77ee9a0c00967f9253d719d168b72d0a" Dec 05 12:40:20 crc kubenswrapper[4784]: I1205 12:40:20.351702 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dhkpm" event={"ID":"948b1977-b295-4eed-9be0-3889587aaa5b","Type":"ContainerDied","Data":"26ee633c9053e739f178eaeb9a575f4e35972d87ae4b740b90e7efddf8fa5ca2"} Dec 05 12:40:20 crc kubenswrapper[4784]: I1205 12:40:20.351742 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="26ee633c9053e739f178eaeb9a575f4e35972d87ae4b740b90e7efddf8fa5ca2" Dec 05 12:40:20 crc kubenswrapper[4784]: I1205 12:40:20.409824 4784 scope.go:117] "RemoveContainer" containerID="11c866510219ee114262d71e8d1664f50ff766cd15061cccb774575db82e5fb2" Dec 05 12:40:20 crc kubenswrapper[4784]: I1205 12:40:20.424912 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-dhkpm" Dec 05 12:40:20 crc kubenswrapper[4784]: I1205 12:40:20.615299 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/948b1977-b295-4eed-9be0-3889587aaa5b-catalog-content\") pod \"948b1977-b295-4eed-9be0-3889587aaa5b\" (UID: \"948b1977-b295-4eed-9be0-3889587aaa5b\") " Dec 05 12:40:20 crc kubenswrapper[4784]: I1205 12:40:20.615406 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/948b1977-b295-4eed-9be0-3889587aaa5b-utilities\") pod \"948b1977-b295-4eed-9be0-3889587aaa5b\" (UID: \"948b1977-b295-4eed-9be0-3889587aaa5b\") " Dec 05 12:40:20 crc kubenswrapper[4784]: I1205 12:40:20.615512 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lvjnf\" (UniqueName: \"kubernetes.io/projected/948b1977-b295-4eed-9be0-3889587aaa5b-kube-api-access-lvjnf\") pod \"948b1977-b295-4eed-9be0-3889587aaa5b\" (UID: \"948b1977-b295-4eed-9be0-3889587aaa5b\") " Dec 05 12:40:20 crc kubenswrapper[4784]: I1205 12:40:20.616482 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/948b1977-b295-4eed-9be0-3889587aaa5b-utilities" (OuterVolumeSpecName: "utilities") pod "948b1977-b295-4eed-9be0-3889587aaa5b" (UID: "948b1977-b295-4eed-9be0-3889587aaa5b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:40:20 crc kubenswrapper[4784]: I1205 12:40:20.620792 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/948b1977-b295-4eed-9be0-3889587aaa5b-kube-api-access-lvjnf" (OuterVolumeSpecName: "kube-api-access-lvjnf") pod "948b1977-b295-4eed-9be0-3889587aaa5b" (UID: "948b1977-b295-4eed-9be0-3889587aaa5b"). InnerVolumeSpecName "kube-api-access-lvjnf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:40:20 crc kubenswrapper[4784]: I1205 12:40:20.699773 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/948b1977-b295-4eed-9be0-3889587aaa5b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "948b1977-b295-4eed-9be0-3889587aaa5b" (UID: "948b1977-b295-4eed-9be0-3889587aaa5b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:40:20 crc kubenswrapper[4784]: I1205 12:40:20.717312 4784 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/948b1977-b295-4eed-9be0-3889587aaa5b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:40:20 crc kubenswrapper[4784]: I1205 12:40:20.717369 4784 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/948b1977-b295-4eed-9be0-3889587aaa5b-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:40:20 crc kubenswrapper[4784]: I1205 12:40:20.717390 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lvjnf\" (UniqueName: \"kubernetes.io/projected/948b1977-b295-4eed-9be0-3889587aaa5b-kube-api-access-lvjnf\") on node \"crc\" DevicePath \"\"" Dec 05 12:40:21 crc kubenswrapper[4784]: I1205 12:40:21.016805 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d964987c-b60c-4b82-a6c8-9b1a6de068fe" path="/var/lib/kubelet/pods/d964987c-b60c-4b82-a6c8-9b1a6de068fe/volumes" Dec 05 12:40:21 crc kubenswrapper[4784]: I1205 12:40:21.361936 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dhkpm" Dec 05 12:40:21 crc kubenswrapper[4784]: I1205 12:40:21.396843 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dhkpm"] Dec 05 12:40:21 crc kubenswrapper[4784]: I1205 12:40:21.402664 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-dhkpm"] Dec 05 12:40:23 crc kubenswrapper[4784]: I1205 12:40:23.012051 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="948b1977-b295-4eed-9be0-3889587aaa5b" path="/var/lib/kubelet/pods/948b1977-b295-4eed-9be0-3889587aaa5b/volumes" Dec 05 12:40:29 crc kubenswrapper[4784]: I1205 12:40:29.573316 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:40:29 crc kubenswrapper[4784]: I1205 12:40:29.573897 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:40:41 crc kubenswrapper[4784]: I1205 12:40:41.513942 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-mzzgx" event={"ID":"40db50d1-6180-45f5-9774-7ed6b6dbf490","Type":"ContainerStarted","Data":"1fa0507865671e61b0bd89d7a16178b94fd543c8f85c8fc6cea524b06bb87f50"} Dec 05 12:40:41 crc kubenswrapper[4784]: I1205 12:40:41.532553 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-mzzgx" podStartSLOduration=1.566482235 podStartE2EDuration="38.532522683s" podCreationTimestamp="2025-12-05 12:40:03 +0000 UTC" firstStartedPulling="2025-12-05 12:40:03.769181093 +0000 UTC m=+883.189247908" lastFinishedPulling="2025-12-05 12:40:40.735221541 +0000 UTC m=+920.155288356" observedRunningTime="2025-12-05 12:40:41.530797849 +0000 UTC m=+920.950864734" 
watchObservedRunningTime="2025-12-05 12:40:41.532522683 +0000 UTC m=+920.952589548" Dec 05 12:40:43 crc kubenswrapper[4784]: I1205 12:40:43.342652 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-mzzgx" Dec 05 12:40:43 crc kubenswrapper[4784]: I1205 12:40:43.343014 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-mzzgx" Dec 05 12:40:43 crc kubenswrapper[4784]: I1205 12:40:43.373524 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-mzzgx" Dec 05 12:40:53 crc kubenswrapper[4784]: I1205 12:40:53.367679 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-mzzgx" Dec 05 12:40:59 crc kubenswrapper[4784]: I1205 12:40:59.572820 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:40:59 crc kubenswrapper[4784]: I1205 12:40:59.573445 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:41:16 crc kubenswrapper[4784]: I1205 12:41:16.839614 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/f6f25c485af4fc88bfa2645e771681c7222de14af05b8289cbd0e3dd6a2hhj6"] Dec 05 12:41:16 crc kubenswrapper[4784]: E1205 12:41:16.840127 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="948b1977-b295-4eed-9be0-3889587aaa5b" containerName="registry-server" Dec 05 12:41:16 crc kubenswrapper[4784]: I1205 12:41:16.840140 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="948b1977-b295-4eed-9be0-3889587aaa5b" containerName="registry-server" Dec 05 12:41:16 crc kubenswrapper[4784]: E1205 12:41:16.840150 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d964987c-b60c-4b82-a6c8-9b1a6de068fe" containerName="extract-content" Dec 05 12:41:16 crc kubenswrapper[4784]: I1205 12:41:16.840155 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="d964987c-b60c-4b82-a6c8-9b1a6de068fe" containerName="extract-content" Dec 05 12:41:16 crc kubenswrapper[4784]: E1205 12:41:16.840172 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d964987c-b60c-4b82-a6c8-9b1a6de068fe" containerName="extract-utilities" Dec 05 12:41:16 crc kubenswrapper[4784]: I1205 12:41:16.840179 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="d964987c-b60c-4b82-a6c8-9b1a6de068fe" containerName="extract-utilities" Dec 05 12:41:16 crc kubenswrapper[4784]: E1205 12:41:16.840208 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="948b1977-b295-4eed-9be0-3889587aaa5b" containerName="extract-utilities" Dec 05 12:41:16 crc kubenswrapper[4784]: I1205 12:41:16.840215 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="948b1977-b295-4eed-9be0-3889587aaa5b" containerName="extract-utilities" Dec 05 12:41:16 crc kubenswrapper[4784]: E1205 12:41:16.840226 4784 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="d964987c-b60c-4b82-a6c8-9b1a6de068fe" containerName="registry-server" Dec 05 12:41:16 crc kubenswrapper[4784]: I1205 12:41:16.840232 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="d964987c-b60c-4b82-a6c8-9b1a6de068fe" containerName="registry-server" Dec 05 12:41:16 crc kubenswrapper[4784]: E1205 12:41:16.840241 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="948b1977-b295-4eed-9be0-3889587aaa5b" containerName="extract-content" Dec 05 12:41:16 crc kubenswrapper[4784]: I1205 12:41:16.840248 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="948b1977-b295-4eed-9be0-3889587aaa5b" containerName="extract-content" Dec 05 12:41:16 crc kubenswrapper[4784]: I1205 12:41:16.840342 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="948b1977-b295-4eed-9be0-3889587aaa5b" containerName="registry-server" Dec 05 12:41:16 crc kubenswrapper[4784]: I1205 12:41:16.840355 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="d964987c-b60c-4b82-a6c8-9b1a6de068fe" containerName="registry-server" Dec 05 12:41:16 crc kubenswrapper[4784]: I1205 12:41:16.841265 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/f6f25c485af4fc88bfa2645e771681c7222de14af05b8289cbd0e3dd6a2hhj6" Dec 05 12:41:16 crc kubenswrapper[4784]: I1205 12:41:16.843527 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-85gwc" Dec 05 12:41:16 crc kubenswrapper[4784]: I1205 12:41:16.855452 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/f6f25c485af4fc88bfa2645e771681c7222de14af05b8289cbd0e3dd6a2hhj6"] Dec 05 12:41:16 crc kubenswrapper[4784]: I1205 12:41:16.983209 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/01816a41-d9a5-4b78-b5b5-553a33adb5d9-bundle\") pod \"f6f25c485af4fc88bfa2645e771681c7222de14af05b8289cbd0e3dd6a2hhj6\" (UID: \"01816a41-d9a5-4b78-b5b5-553a33adb5d9\") " pod="openstack-operators/f6f25c485af4fc88bfa2645e771681c7222de14af05b8289cbd0e3dd6a2hhj6" Dec 05 12:41:16 crc kubenswrapper[4784]: I1205 12:41:16.983302 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/01816a41-d9a5-4b78-b5b5-553a33adb5d9-util\") pod \"f6f25c485af4fc88bfa2645e771681c7222de14af05b8289cbd0e3dd6a2hhj6\" (UID: \"01816a41-d9a5-4b78-b5b5-553a33adb5d9\") " pod="openstack-operators/f6f25c485af4fc88bfa2645e771681c7222de14af05b8289cbd0e3dd6a2hhj6" Dec 05 12:41:16 crc kubenswrapper[4784]: I1205 12:41:16.983352 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z7rjq\" (UniqueName: \"kubernetes.io/projected/01816a41-d9a5-4b78-b5b5-553a33adb5d9-kube-api-access-z7rjq\") pod \"f6f25c485af4fc88bfa2645e771681c7222de14af05b8289cbd0e3dd6a2hhj6\" (UID: \"01816a41-d9a5-4b78-b5b5-553a33adb5d9\") " pod="openstack-operators/f6f25c485af4fc88bfa2645e771681c7222de14af05b8289cbd0e3dd6a2hhj6" Dec 05 12:41:17 crc kubenswrapper[4784]: I1205 12:41:17.085084 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/01816a41-d9a5-4b78-b5b5-553a33adb5d9-bundle\") pod \"f6f25c485af4fc88bfa2645e771681c7222de14af05b8289cbd0e3dd6a2hhj6\" (UID: \"01816a41-d9a5-4b78-b5b5-553a33adb5d9\") " 
pod="openstack-operators/f6f25c485af4fc88bfa2645e771681c7222de14af05b8289cbd0e3dd6a2hhj6" Dec 05 12:41:17 crc kubenswrapper[4784]: I1205 12:41:17.085259 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/01816a41-d9a5-4b78-b5b5-553a33adb5d9-util\") pod \"f6f25c485af4fc88bfa2645e771681c7222de14af05b8289cbd0e3dd6a2hhj6\" (UID: \"01816a41-d9a5-4b78-b5b5-553a33adb5d9\") " pod="openstack-operators/f6f25c485af4fc88bfa2645e771681c7222de14af05b8289cbd0e3dd6a2hhj6" Dec 05 12:41:17 crc kubenswrapper[4784]: I1205 12:41:17.085333 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z7rjq\" (UniqueName: \"kubernetes.io/projected/01816a41-d9a5-4b78-b5b5-553a33adb5d9-kube-api-access-z7rjq\") pod \"f6f25c485af4fc88bfa2645e771681c7222de14af05b8289cbd0e3dd6a2hhj6\" (UID: \"01816a41-d9a5-4b78-b5b5-553a33adb5d9\") " pod="openstack-operators/f6f25c485af4fc88bfa2645e771681c7222de14af05b8289cbd0e3dd6a2hhj6" Dec 05 12:41:17 crc kubenswrapper[4784]: I1205 12:41:17.085597 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/01816a41-d9a5-4b78-b5b5-553a33adb5d9-bundle\") pod \"f6f25c485af4fc88bfa2645e771681c7222de14af05b8289cbd0e3dd6a2hhj6\" (UID: \"01816a41-d9a5-4b78-b5b5-553a33adb5d9\") " pod="openstack-operators/f6f25c485af4fc88bfa2645e771681c7222de14af05b8289cbd0e3dd6a2hhj6" Dec 05 12:41:17 crc kubenswrapper[4784]: I1205 12:41:17.085769 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/01816a41-d9a5-4b78-b5b5-553a33adb5d9-util\") pod \"f6f25c485af4fc88bfa2645e771681c7222de14af05b8289cbd0e3dd6a2hhj6\" (UID: \"01816a41-d9a5-4b78-b5b5-553a33adb5d9\") " pod="openstack-operators/f6f25c485af4fc88bfa2645e771681c7222de14af05b8289cbd0e3dd6a2hhj6" Dec 05 12:41:17 crc kubenswrapper[4784]: I1205 12:41:17.106476 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z7rjq\" (UniqueName: \"kubernetes.io/projected/01816a41-d9a5-4b78-b5b5-553a33adb5d9-kube-api-access-z7rjq\") pod \"f6f25c485af4fc88bfa2645e771681c7222de14af05b8289cbd0e3dd6a2hhj6\" (UID: \"01816a41-d9a5-4b78-b5b5-553a33adb5d9\") " pod="openstack-operators/f6f25c485af4fc88bfa2645e771681c7222de14af05b8289cbd0e3dd6a2hhj6" Dec 05 12:41:17 crc kubenswrapper[4784]: I1205 12:41:17.167740 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/f6f25c485af4fc88bfa2645e771681c7222de14af05b8289cbd0e3dd6a2hhj6" Dec 05 12:41:17 crc kubenswrapper[4784]: I1205 12:41:17.437593 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/f6f25c485af4fc88bfa2645e771681c7222de14af05b8289cbd0e3dd6a2hhj6"] Dec 05 12:41:17 crc kubenswrapper[4784]: I1205 12:41:17.752543 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/f6f25c485af4fc88bfa2645e771681c7222de14af05b8289cbd0e3dd6a2hhj6" event={"ID":"01816a41-d9a5-4b78-b5b5-553a33adb5d9","Type":"ContainerStarted","Data":"e1b6ede61df18db99640e78f1ddccbb8ac5ec3ec5a69d6a4374214e003fb6823"} Dec 05 12:41:18 crc kubenswrapper[4784]: I1205 12:41:18.761735 4784 generic.go:334] "Generic (PLEG): container finished" podID="01816a41-d9a5-4b78-b5b5-553a33adb5d9" containerID="e3ca1ac39b6531d13ed105dcf5b0a7cfa571d5b43d3136c01e1b1e9e01f665f5" exitCode=0 Dec 05 12:41:18 crc kubenswrapper[4784]: I1205 12:41:18.761830 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/f6f25c485af4fc88bfa2645e771681c7222de14af05b8289cbd0e3dd6a2hhj6" event={"ID":"01816a41-d9a5-4b78-b5b5-553a33adb5d9","Type":"ContainerDied","Data":"e3ca1ac39b6531d13ed105dcf5b0a7cfa571d5b43d3136c01e1b1e9e01f665f5"} Dec 05 12:41:18 crc kubenswrapper[4784]: I1205 12:41:18.764287 4784 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 12:41:21 crc kubenswrapper[4784]: I1205 12:41:21.787471 4784 generic.go:334] "Generic (PLEG): container finished" podID="01816a41-d9a5-4b78-b5b5-553a33adb5d9" containerID="70ce73eab8662d023dd1c7704bb0028016f64cd604ab74537e740ff9d134da05" exitCode=0 Dec 05 12:41:21 crc kubenswrapper[4784]: I1205 12:41:21.787596 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/f6f25c485af4fc88bfa2645e771681c7222de14af05b8289cbd0e3dd6a2hhj6" event={"ID":"01816a41-d9a5-4b78-b5b5-553a33adb5d9","Type":"ContainerDied","Data":"70ce73eab8662d023dd1c7704bb0028016f64cd604ab74537e740ff9d134da05"} Dec 05 12:41:22 crc kubenswrapper[4784]: I1205 12:41:22.794566 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/f6f25c485af4fc88bfa2645e771681c7222de14af05b8289cbd0e3dd6a2hhj6" event={"ID":"01816a41-d9a5-4b78-b5b5-553a33adb5d9","Type":"ContainerStarted","Data":"2f8d93c035db3151aeb1ab7d3fbd32beef9a58c4b5de1b5a442a718fb3699ac0"} Dec 05 12:41:22 crc kubenswrapper[4784]: I1205 12:41:22.817283 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/f6f25c485af4fc88bfa2645e771681c7222de14af05b8289cbd0e3dd6a2hhj6" podStartSLOduration=5.040872766 podStartE2EDuration="6.817266526s" podCreationTimestamp="2025-12-05 12:41:16 +0000 UTC" firstStartedPulling="2025-12-05 12:41:18.763607679 +0000 UTC m=+958.183674504" lastFinishedPulling="2025-12-05 12:41:20.540001439 +0000 UTC m=+959.960068264" observedRunningTime="2025-12-05 12:41:22.812016961 +0000 UTC m=+962.232083776" watchObservedRunningTime="2025-12-05 12:41:22.817266526 +0000 UTC m=+962.237333331" Dec 05 12:41:23 crc kubenswrapper[4784]: I1205 12:41:23.804470 4784 generic.go:334] "Generic (PLEG): container finished" podID="01816a41-d9a5-4b78-b5b5-553a33adb5d9" containerID="2f8d93c035db3151aeb1ab7d3fbd32beef9a58c4b5de1b5a442a718fb3699ac0" exitCode=0 Dec 05 12:41:23 crc kubenswrapper[4784]: I1205 12:41:23.804559 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/f6f25c485af4fc88bfa2645e771681c7222de14af05b8289cbd0e3dd6a2hhj6" event={"ID":"01816a41-d9a5-4b78-b5b5-553a33adb5d9","Type":"ContainerDied","Data":"2f8d93c035db3151aeb1ab7d3fbd32beef9a58c4b5de1b5a442a718fb3699ac0"} Dec 05 12:41:25 crc kubenswrapper[4784]: I1205 12:41:25.128264 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/f6f25c485af4fc88bfa2645e771681c7222de14af05b8289cbd0e3dd6a2hhj6" Dec 05 12:41:25 crc kubenswrapper[4784]: I1205 12:41:25.301799 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z7rjq\" (UniqueName: \"kubernetes.io/projected/01816a41-d9a5-4b78-b5b5-553a33adb5d9-kube-api-access-z7rjq\") pod \"01816a41-d9a5-4b78-b5b5-553a33adb5d9\" (UID: \"01816a41-d9a5-4b78-b5b5-553a33adb5d9\") " Dec 05 12:41:25 crc kubenswrapper[4784]: I1205 12:41:25.301904 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/01816a41-d9a5-4b78-b5b5-553a33adb5d9-util\") pod \"01816a41-d9a5-4b78-b5b5-553a33adb5d9\" (UID: \"01816a41-d9a5-4b78-b5b5-553a33adb5d9\") " Dec 05 12:41:25 crc kubenswrapper[4784]: I1205 12:41:25.302019 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/01816a41-d9a5-4b78-b5b5-553a33adb5d9-bundle\") pod \"01816a41-d9a5-4b78-b5b5-553a33adb5d9\" (UID: \"01816a41-d9a5-4b78-b5b5-553a33adb5d9\") " Dec 05 12:41:25 crc kubenswrapper[4784]: I1205 12:41:25.302975 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/01816a41-d9a5-4b78-b5b5-553a33adb5d9-bundle" (OuterVolumeSpecName: "bundle") pod "01816a41-d9a5-4b78-b5b5-553a33adb5d9" (UID: "01816a41-d9a5-4b78-b5b5-553a33adb5d9"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:41:25 crc kubenswrapper[4784]: I1205 12:41:25.309879 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01816a41-d9a5-4b78-b5b5-553a33adb5d9-kube-api-access-z7rjq" (OuterVolumeSpecName: "kube-api-access-z7rjq") pod "01816a41-d9a5-4b78-b5b5-553a33adb5d9" (UID: "01816a41-d9a5-4b78-b5b5-553a33adb5d9"). InnerVolumeSpecName "kube-api-access-z7rjq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:41:25 crc kubenswrapper[4784]: I1205 12:41:25.318166 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/01816a41-d9a5-4b78-b5b5-553a33adb5d9-util" (OuterVolumeSpecName: "util") pod "01816a41-d9a5-4b78-b5b5-553a33adb5d9" (UID: "01816a41-d9a5-4b78-b5b5-553a33adb5d9"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:41:25 crc kubenswrapper[4784]: I1205 12:41:25.404364 4784 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/01816a41-d9a5-4b78-b5b5-553a33adb5d9-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:41:25 crc kubenswrapper[4784]: I1205 12:41:25.404420 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z7rjq\" (UniqueName: \"kubernetes.io/projected/01816a41-d9a5-4b78-b5b5-553a33adb5d9-kube-api-access-z7rjq\") on node \"crc\" DevicePath \"\"" Dec 05 12:41:25 crc kubenswrapper[4784]: I1205 12:41:25.404442 4784 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/01816a41-d9a5-4b78-b5b5-553a33adb5d9-util\") on node \"crc\" DevicePath \"\"" Dec 05 12:41:25 crc kubenswrapper[4784]: I1205 12:41:25.824078 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/f6f25c485af4fc88bfa2645e771681c7222de14af05b8289cbd0e3dd6a2hhj6" event={"ID":"01816a41-d9a5-4b78-b5b5-553a33adb5d9","Type":"ContainerDied","Data":"e1b6ede61df18db99640e78f1ddccbb8ac5ec3ec5a69d6a4374214e003fb6823"} Dec 05 12:41:25 crc kubenswrapper[4784]: I1205 12:41:25.824429 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e1b6ede61df18db99640e78f1ddccbb8ac5ec3ec5a69d6a4374214e003fb6823" Dec 05 12:41:25 crc kubenswrapper[4784]: I1205 12:41:25.824135 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/f6f25c485af4fc88bfa2645e771681c7222de14af05b8289cbd0e3dd6a2hhj6" Dec 05 12:41:28 crc kubenswrapper[4784]: I1205 12:41:28.994275 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-64f95d469-ftlj7"] Dec 05 12:41:28 crc kubenswrapper[4784]: E1205 12:41:28.995102 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01816a41-d9a5-4b78-b5b5-553a33adb5d9" containerName="pull" Dec 05 12:41:28 crc kubenswrapper[4784]: I1205 12:41:28.995116 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="01816a41-d9a5-4b78-b5b5-553a33adb5d9" containerName="pull" Dec 05 12:41:28 crc kubenswrapper[4784]: E1205 12:41:28.995131 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01816a41-d9a5-4b78-b5b5-553a33adb5d9" containerName="extract" Dec 05 12:41:28 crc kubenswrapper[4784]: I1205 12:41:28.995138 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="01816a41-d9a5-4b78-b5b5-553a33adb5d9" containerName="extract" Dec 05 12:41:28 crc kubenswrapper[4784]: E1205 12:41:28.995155 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01816a41-d9a5-4b78-b5b5-553a33adb5d9" containerName="util" Dec 05 12:41:28 crc kubenswrapper[4784]: I1205 12:41:28.995162 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="01816a41-d9a5-4b78-b5b5-553a33adb5d9" containerName="util" Dec 05 12:41:28 crc kubenswrapper[4784]: I1205 12:41:28.995342 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="01816a41-d9a5-4b78-b5b5-553a33adb5d9" containerName="extract" Dec 05 12:41:28 crc kubenswrapper[4784]: I1205 12:41:28.995890 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-64f95d469-ftlj7" Dec 05 12:41:28 crc kubenswrapper[4784]: I1205 12:41:28.998738 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-tcpv2" Dec 05 12:41:29 crc kubenswrapper[4784]: I1205 12:41:29.023765 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-64f95d469-ftlj7"] Dec 05 12:41:29 crc kubenswrapper[4784]: I1205 12:41:29.154228 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wgv6p\" (UniqueName: \"kubernetes.io/projected/96560d18-563e-4929-891e-4fb7c9a88619-kube-api-access-wgv6p\") pod \"openstack-operator-controller-operator-64f95d469-ftlj7\" (UID: \"96560d18-563e-4929-891e-4fb7c9a88619\") " pod="openstack-operators/openstack-operator-controller-operator-64f95d469-ftlj7" Dec 05 12:41:29 crc kubenswrapper[4784]: I1205 12:41:29.255539 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wgv6p\" (UniqueName: \"kubernetes.io/projected/96560d18-563e-4929-891e-4fb7c9a88619-kube-api-access-wgv6p\") pod \"openstack-operator-controller-operator-64f95d469-ftlj7\" (UID: \"96560d18-563e-4929-891e-4fb7c9a88619\") " pod="openstack-operators/openstack-operator-controller-operator-64f95d469-ftlj7" Dec 05 12:41:29 crc kubenswrapper[4784]: I1205 12:41:29.279217 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wgv6p\" (UniqueName: \"kubernetes.io/projected/96560d18-563e-4929-891e-4fb7c9a88619-kube-api-access-wgv6p\") pod \"openstack-operator-controller-operator-64f95d469-ftlj7\" (UID: \"96560d18-563e-4929-891e-4fb7c9a88619\") " pod="openstack-operators/openstack-operator-controller-operator-64f95d469-ftlj7" Dec 05 12:41:29 crc kubenswrapper[4784]: I1205 12:41:29.314282 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-64f95d469-ftlj7" Dec 05 12:41:29 crc kubenswrapper[4784]: I1205 12:41:29.575081 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:41:29 crc kubenswrapper[4784]: I1205 12:41:29.584934 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:41:29 crc kubenswrapper[4784]: I1205 12:41:29.585061 4784 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" Dec 05 12:41:29 crc kubenswrapper[4784]: I1205 12:41:29.585803 4784 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"11e4eb9d74cf9c3812a84422c53f6f066a9bf23a80067c974429c0fabba67997"} pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 12:41:29 crc kubenswrapper[4784]: I1205 12:41:29.585871 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" containerID="cri-o://11e4eb9d74cf9c3812a84422c53f6f066a9bf23a80067c974429c0fabba67997" gracePeriod=600 Dec 05 12:41:29 crc kubenswrapper[4784]: I1205 12:41:29.592217 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-64f95d469-ftlj7"] Dec 05 12:41:29 crc kubenswrapper[4784]: I1205 12:41:29.851887 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-64f95d469-ftlj7" event={"ID":"96560d18-563e-4929-891e-4fb7c9a88619","Type":"ContainerStarted","Data":"d73388b7e9e8a5b454dfdeeb3a47bd18dd62ae9e3e3af135f6a9cb37eb79990e"} Dec 05 12:41:31 crc kubenswrapper[4784]: I1205 12:41:31.871565 4784 generic.go:334] "Generic (PLEG): container finished" podID="be412f31-7a36-4811-8914-be8cdc987d08" containerID="11e4eb9d74cf9c3812a84422c53f6f066a9bf23a80067c974429c0fabba67997" exitCode=0 Dec 05 12:41:31 crc kubenswrapper[4784]: I1205 12:41:31.871703 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerDied","Data":"11e4eb9d74cf9c3812a84422c53f6f066a9bf23a80067c974429c0fabba67997"} Dec 05 12:41:31 crc kubenswrapper[4784]: I1205 12:41:31.872263 4784 scope.go:117] "RemoveContainer" containerID="137e007b26aac5135103d758a13b82ca82f6b2724608f280182653dcce8c9022" Dec 05 12:41:32 crc kubenswrapper[4784]: I1205 12:41:32.885506 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerStarted","Data":"a15484205cc287f77cc28ed8494e1ca51b919f7c735fb5329f3dd3fb14f9fd3b"} Dec 05 
Dec 05 12:41:39 crc kubenswrapper[4784]: I1205 12:41:39.951990 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-64f95d469-ftlj7"
Dec 05 12:41:39 crc kubenswrapper[4784]: I1205 12:41:39.987393 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-64f95d469-ftlj7" podStartSLOduration=2.593651249 podStartE2EDuration="11.987370697s" podCreationTimestamp="2025-12-05 12:41:28 +0000 UTC" firstStartedPulling="2025-12-05 12:41:29.608825092 +0000 UTC m=+969.028891907" lastFinishedPulling="2025-12-05 12:41:39.00254454 +0000 UTC m=+978.422611355" observedRunningTime="2025-12-05 12:41:39.982847474 +0000 UTC m=+979.402914319" watchObservedRunningTime="2025-12-05 12:41:39.987370697 +0000 UTC m=+979.407437512"
Dec 05 12:41:49 crc kubenswrapper[4784]: I1205 12:41:49.316903 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-64f95d469-ftlj7"
Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.630910 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-8ct4c"]
Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.632862 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-8ct4c"
Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.634917 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-x5fhz"
Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.635332 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-grj7k"]
Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.637468 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-grj7k"
Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.638928 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-7vx9h"
Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.644397 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-8ct4c"]
Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.663290 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-g4cvf"]
Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.664629 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-g4cvf"
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-g4cvf" Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.669036 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-xvmxr" Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.675851 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-grj7k"] Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.684611 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-g4cvf"] Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.696158 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-tkgxb"] Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.697430 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-tkgxb" Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.697548 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45h9t\" (UniqueName: \"kubernetes.io/projected/cae1438b-c8fd-4660-8843-f41bca4b1e15-kube-api-access-45h9t\") pod \"designate-operator-controller-manager-78b4bc895b-g4cvf\" (UID: \"cae1438b-c8fd-4660-8843-f41bca4b1e15\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-g4cvf" Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.697601 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j2ztl\" (UniqueName: \"kubernetes.io/projected/5434c275-5acc-4ffe-94ff-1cd9440300b0-kube-api-access-j2ztl\") pod \"barbican-operator-controller-manager-7d9dfd778-8ct4c\" (UID: \"5434c275-5acc-4ffe-94ff-1cd9440300b0\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-8ct4c" Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.697651 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j6dr2\" (UniqueName: \"kubernetes.io/projected/17a1e99d-2e27-47df-93be-afbb5224152b-kube-api-access-j6dr2\") pod \"cinder-operator-controller-manager-859b6ccc6-grj7k\" (UID: \"17a1e99d-2e27-47df-93be-afbb5224152b\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-grj7k" Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.706334 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-gzbnx" Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.720869 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-tkgxb"] Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.730259 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-bk9hd"] Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.731332 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-bk9hd" Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.736295 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-298wl" Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.737588 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-lw4zg"] Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.738967 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-lw4zg" Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.740873 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-d84zf" Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.749085 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-6n77l"] Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.750067 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-6n77l" Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.756499 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-tfpnp" Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.756527 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.758623 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-bk9hd"] Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.765559 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-6n77l"] Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.769323 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-lw4zg"] Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.804115 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j2ztl\" (UniqueName: \"kubernetes.io/projected/5434c275-5acc-4ffe-94ff-1cd9440300b0-kube-api-access-j2ztl\") pod \"barbican-operator-controller-manager-7d9dfd778-8ct4c\" (UID: \"5434c275-5acc-4ffe-94ff-1cd9440300b0\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-8ct4c" Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.804205 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j6dr2\" (UniqueName: \"kubernetes.io/projected/17a1e99d-2e27-47df-93be-afbb5224152b-kube-api-access-j6dr2\") pod \"cinder-operator-controller-manager-859b6ccc6-grj7k\" (UID: \"17a1e99d-2e27-47df-93be-afbb5224152b\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-grj7k" Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.804274 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-45h9t\" (UniqueName: \"kubernetes.io/projected/cae1438b-c8fd-4660-8843-f41bca4b1e15-kube-api-access-45h9t\") pod 
\"designate-operator-controller-manager-78b4bc895b-g4cvf\" (UID: \"cae1438b-c8fd-4660-8843-f41bca4b1e15\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-g4cvf" Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.816546 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-fhllr"] Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.825103 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-fhllr" Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.838533 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-5tkq5" Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.846075 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j2ztl\" (UniqueName: \"kubernetes.io/projected/5434c275-5acc-4ffe-94ff-1cd9440300b0-kube-api-access-j2ztl\") pod \"barbican-operator-controller-manager-7d9dfd778-8ct4c\" (UID: \"5434c275-5acc-4ffe-94ff-1cd9440300b0\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-8ct4c" Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.846165 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45h9t\" (UniqueName: \"kubernetes.io/projected/cae1438b-c8fd-4660-8843-f41bca4b1e15-kube-api-access-45h9t\") pod \"designate-operator-controller-manager-78b4bc895b-g4cvf\" (UID: \"cae1438b-c8fd-4660-8843-f41bca4b1e15\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-g4cvf" Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.846255 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-fhllr"] Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.859082 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j6dr2\" (UniqueName: \"kubernetes.io/projected/17a1e99d-2e27-47df-93be-afbb5224152b-kube-api-access-j6dr2\") pod \"cinder-operator-controller-manager-859b6ccc6-grj7k\" (UID: \"17a1e99d-2e27-47df-93be-afbb5224152b\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-grj7k" Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.877507 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-bstnz"] Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.905384 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-bstnz" Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.909668 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-z6n24" Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.915328 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7bhf9\" (UniqueName: \"kubernetes.io/projected/253f2712-fbf0-476b-8ba3-387f7811e4f7-kube-api-access-7bhf9\") pod \"keystone-operator-controller-manager-7765d96ddf-bstnz\" (UID: \"253f2712-fbf0-476b-8ba3-387f7811e4f7\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-bstnz" Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.915368 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f449d\" (UniqueName: \"kubernetes.io/projected/6c20830c-fef0-4691-9505-5d0c3726ca11-kube-api-access-f449d\") pod \"infra-operator-controller-manager-57548d458d-6n77l\" (UID: \"6c20830c-fef0-4691-9505-5d0c3726ca11\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-6n77l" Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.915397 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-62772\" (UniqueName: \"kubernetes.io/projected/bd95d9d9-a3b2-4f91-94f1-a60041b5b640-kube-api-access-62772\") pod \"ironic-operator-controller-manager-6c548fd776-fhllr\" (UID: \"bd95d9d9-a3b2-4f91-94f1-a60041b5b640\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-fhllr" Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.915415 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6c20830c-fef0-4691-9505-5d0c3726ca11-cert\") pod \"infra-operator-controller-manager-57548d458d-6n77l\" (UID: \"6c20830c-fef0-4691-9505-5d0c3726ca11\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-6n77l" Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.915440 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sq7r2\" (UniqueName: \"kubernetes.io/projected/34392862-6b0a-4e19-8702-d685378817b1-kube-api-access-sq7r2\") pod \"heat-operator-controller-manager-5f64f6f8bb-lw4zg\" (UID: \"34392862-6b0a-4e19-8702-d685378817b1\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-lw4zg" Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.915470 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tz4sp\" (UniqueName: \"kubernetes.io/projected/d14c7f23-4235-4257-a178-6b90aa4cf3b4-kube-api-access-tz4sp\") pod \"horizon-operator-controller-manager-68c6d99b8f-bk9hd\" (UID: \"d14c7f23-4235-4257-a178-6b90aa4cf3b4\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-bk9hd" Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.915521 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6f9r\" (UniqueName: \"kubernetes.io/projected/bfa95d2c-7e0c-4a2e-8942-03eb8dfddbd5-kube-api-access-x6f9r\") pod \"glance-operator-controller-manager-77987cd8cd-tkgxb\" (UID: \"bfa95d2c-7e0c-4a2e-8942-03eb8dfddbd5\") " 
pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-tkgxb" Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.958585 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-fkjcs"] Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.959866 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-fkjcs" Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.960892 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-8ct4c" Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.962623 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-q5wq2" Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.978358 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-bstnz"] Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.985177 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-fkjcs"] Dec 05 12:42:08 crc kubenswrapper[4784]: I1205 12:42:08.986890 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-grj7k" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.010645 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-g4cvf" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.016145 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-5z5pm"] Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.019955 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-n6sh9"] Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.020936 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-5z5pm" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.021065 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-5z5pm"] Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.021164 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-n6sh9" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.030748 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-prd2m"] Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.032015 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-prd2m" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.036759 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-n6sh9"] Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.039642 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sq7r2\" (UniqueName: \"kubernetes.io/projected/34392862-6b0a-4e19-8702-d685378817b1-kube-api-access-sq7r2\") pod \"heat-operator-controller-manager-5f64f6f8bb-lw4zg\" (UID: \"34392862-6b0a-4e19-8702-d685378817b1\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-lw4zg" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.039733 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tz4sp\" (UniqueName: \"kubernetes.io/projected/d14c7f23-4235-4257-a178-6b90aa4cf3b4-kube-api-access-tz4sp\") pod \"horizon-operator-controller-manager-68c6d99b8f-bk9hd\" (UID: \"d14c7f23-4235-4257-a178-6b90aa4cf3b4\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-bk9hd" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.039861 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6f9r\" (UniqueName: \"kubernetes.io/projected/bfa95d2c-7e0c-4a2e-8942-03eb8dfddbd5-kube-api-access-x6f9r\") pod \"glance-operator-controller-manager-77987cd8cd-tkgxb\" (UID: \"bfa95d2c-7e0c-4a2e-8942-03eb8dfddbd5\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-tkgxb" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.039889 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7bhf9\" (UniqueName: \"kubernetes.io/projected/253f2712-fbf0-476b-8ba3-387f7811e4f7-kube-api-access-7bhf9\") pod \"keystone-operator-controller-manager-7765d96ddf-bstnz\" (UID: \"253f2712-fbf0-476b-8ba3-387f7811e4f7\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-bstnz" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.039916 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f449d\" (UniqueName: \"kubernetes.io/projected/6c20830c-fef0-4691-9505-5d0c3726ca11-kube-api-access-f449d\") pod \"infra-operator-controller-manager-57548d458d-6n77l\" (UID: \"6c20830c-fef0-4691-9505-5d0c3726ca11\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-6n77l" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.039957 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-62772\" (UniqueName: \"kubernetes.io/projected/bd95d9d9-a3b2-4f91-94f1-a60041b5b640-kube-api-access-62772\") pod \"ironic-operator-controller-manager-6c548fd776-fhllr\" (UID: \"bd95d9d9-a3b2-4f91-94f1-a60041b5b640\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-fhllr" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.039984 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6c20830c-fef0-4691-9505-5d0c3726ca11-cert\") pod \"infra-operator-controller-manager-57548d458d-6n77l\" (UID: \"6c20830c-fef0-4691-9505-5d0c3726ca11\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-6n77l" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.040012 4784 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p96d6\" (UniqueName: \"kubernetes.io/projected/70f6568a-d588-4d71-8e38-def379ac95cf-kube-api-access-p96d6\") pod \"manila-operator-controller-manager-7c79b5df47-fkjcs\" (UID: \"70f6568a-d588-4d71-8e38-def379ac95cf\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-fkjcs" Dec 05 12:42:09 crc kubenswrapper[4784]: E1205 12:42:09.041115 4784 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 12:42:09 crc kubenswrapper[4784]: E1205 12:42:09.041201 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6c20830c-fef0-4691-9505-5d0c3726ca11-cert podName:6c20830c-fef0-4691-9505-5d0c3726ca11 nodeName:}" failed. No retries permitted until 2025-12-05 12:42:09.541166764 +0000 UTC m=+1008.961233579 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/6c20830c-fef0-4691-9505-5d0c3726ca11-cert") pod "infra-operator-controller-manager-57548d458d-6n77l" (UID: "6c20830c-fef0-4691-9505-5d0c3726ca11") : secret "infra-operator-webhook-server-cert" not found Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.043994 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-jfmtp" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.044165 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-757l9" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.043166 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-g9fk6" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.062634 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x6f9r\" (UniqueName: \"kubernetes.io/projected/bfa95d2c-7e0c-4a2e-8942-03eb8dfddbd5-kube-api-access-x6f9r\") pod \"glance-operator-controller-manager-77987cd8cd-tkgxb\" (UID: \"bfa95d2c-7e0c-4a2e-8942-03eb8dfddbd5\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-tkgxb" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.065703 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7bhf9\" (UniqueName: \"kubernetes.io/projected/253f2712-fbf0-476b-8ba3-387f7811e4f7-kube-api-access-7bhf9\") pod \"keystone-operator-controller-manager-7765d96ddf-bstnz\" (UID: \"253f2712-fbf0-476b-8ba3-387f7811e4f7\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-bstnz" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.066944 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-62772\" (UniqueName: \"kubernetes.io/projected/bd95d9d9-a3b2-4f91-94f1-a60041b5b640-kube-api-access-62772\") pod \"ironic-operator-controller-manager-6c548fd776-fhllr\" (UID: \"bd95d9d9-a3b2-4f91-94f1-a60041b5b640\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-fhllr" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.068014 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f449d\" (UniqueName: \"kubernetes.io/projected/6c20830c-fef0-4691-9505-5d0c3726ca11-kube-api-access-f449d\") pod 
\"infra-operator-controller-manager-57548d458d-6n77l\" (UID: \"6c20830c-fef0-4691-9505-5d0c3726ca11\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-6n77l" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.068037 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tz4sp\" (UniqueName: \"kubernetes.io/projected/d14c7f23-4235-4257-a178-6b90aa4cf3b4-kube-api-access-tz4sp\") pod \"horizon-operator-controller-manager-68c6d99b8f-bk9hd\" (UID: \"d14c7f23-4235-4257-a178-6b90aa4cf3b4\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-bk9hd" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.069051 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sq7r2\" (UniqueName: \"kubernetes.io/projected/34392862-6b0a-4e19-8702-d685378817b1-kube-api-access-sq7r2\") pod \"heat-operator-controller-manager-5f64f6f8bb-lw4zg\" (UID: \"34392862-6b0a-4e19-8702-d685378817b1\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-lw4zg" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.071233 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-prd2m"] Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.076911 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-lw4zg" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.084469 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-llgbj"] Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.085786 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-llgbj" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.087349 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-jg7qp" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.112644 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4646jk"] Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.121829 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4646jk" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.122508 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-llgbj"] Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.124349 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.124632 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-ntzkz" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.139559 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4646jk"] Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.141328 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-spxrc\" (UniqueName: \"kubernetes.io/projected/d32fc3d7-6f1d-4f5c-8f70-39a417849b13-kube-api-access-spxrc\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-n6sh9\" (UID: \"d32fc3d7-6f1d-4f5c-8f70-39a417849b13\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-n6sh9" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.141412 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2xmkt\" (UniqueName: \"kubernetes.io/projected/6200dbb3-7166-4fa0-925c-fe6155de2927-kube-api-access-2xmkt\") pod \"mariadb-operator-controller-manager-56bbcc9d85-5z5pm\" (UID: \"6200dbb3-7166-4fa0-925c-fe6155de2927\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-5z5pm" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.141441 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p96d6\" (UniqueName: \"kubernetes.io/projected/70f6568a-d588-4d71-8e38-def379ac95cf-kube-api-access-p96d6\") pod \"manila-operator-controller-manager-7c79b5df47-fkjcs\" (UID: \"70f6568a-d588-4d71-8e38-def379ac95cf\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-fkjcs" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.141472 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9k6pb\" (UniqueName: \"kubernetes.io/projected/af5e1f7a-185c-402f-80b7-fb6c66084d0f-kube-api-access-9k6pb\") pod \"nova-operator-controller-manager-697bc559fc-prd2m\" (UID: \"af5e1f7a-185c-402f-80b7-fb6c66084d0f\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-prd2m" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.141499 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r45kz\" (UniqueName: \"kubernetes.io/projected/52133bea-24d3-440b-880d-67a3131c52db-kube-api-access-r45kz\") pod \"octavia-operator-controller-manager-998648c74-llgbj\" (UID: \"52133bea-24d3-440b-880d-67a3131c52db\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-llgbj" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.157673 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-2jxh6"] Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.163081 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-n2hfq"
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.165809 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-fs6fj"]
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.169223 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p96d6\" (UniqueName: \"kubernetes.io/projected/70f6568a-d588-4d71-8e38-def379ac95cf-kube-api-access-p96d6\") pod \"manila-operator-controller-manager-7c79b5df47-fkjcs\" (UID: \"70f6568a-d588-4d71-8e38-def379ac95cf\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-fkjcs"
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.169815 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-fs6fj"
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.171946 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-fjn9t"
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.176745 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-fs6fj"]
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.194413 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-2jxh6"]
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.214270 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-2dt8t"]
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.214974 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-fhllr"
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.221398 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-2dt8t"
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.226747 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-6r8z9"
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.231292 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-bstnz"
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.241586 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-2dt8t"]
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.243822 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b29x5\" (UniqueName: \"kubernetes.io/projected/2e6a7f03-9a79-4c8f-8dfd-c1f4e6e3c2ad-kube-api-access-b29x5\") pod \"ovn-operator-controller-manager-b6456fdb6-2jxh6\" (UID: \"2e6a7f03-9a79-4c8f-8dfd-c1f4e6e3c2ad\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-2jxh6"
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.243896 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2xmkt\" (UniqueName: \"kubernetes.io/projected/6200dbb3-7166-4fa0-925c-fe6155de2927-kube-api-access-2xmkt\") pod \"mariadb-operator-controller-manager-56bbcc9d85-5z5pm\" (UID: \"6200dbb3-7166-4fa0-925c-fe6155de2927\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-5z5pm"
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.243924 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9k6pb\" (UniqueName: \"kubernetes.io/projected/af5e1f7a-185c-402f-80b7-fb6c66084d0f-kube-api-access-9k6pb\") pod \"nova-operator-controller-manager-697bc559fc-prd2m\" (UID: \"af5e1f7a-185c-402f-80b7-fb6c66084d0f\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-prd2m"
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.243950 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r45kz\" (UniqueName: \"kubernetes.io/projected/52133bea-24d3-440b-880d-67a3131c52db-kube-api-access-r45kz\") pod \"octavia-operator-controller-manager-998648c74-llgbj\" (UID: \"52133bea-24d3-440b-880d-67a3131c52db\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-llgbj"
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.244034 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r6rlt\" (UniqueName: \"kubernetes.io/projected/d725fa2f-4d6e-47b2-aa2d-1757fcef4ad5-kube-api-access-r6rlt\") pod \"swift-operator-controller-manager-5f8c65bbfc-2dt8t\" (UID: \"d725fa2f-4d6e-47b2-aa2d-1757fcef4ad5\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-2dt8t"
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.244056 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d2899908-ecd6-4e04-932d-f26909c0f547-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4646jk\" (UID: \"d2899908-ecd6-4e04-932d-f26909c0f547\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4646jk"
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.244083 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r8gbn\" (UniqueName: \"kubernetes.io/projected/c31c0c1f-afa0-4ba8-a638-d27370864b63-kube-api-access-r8gbn\") pod \"placement-operator-controller-manager-78f8948974-fs6fj\" (UID: \"c31c0c1f-afa0-4ba8-a638-d27370864b63\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-fs6fj"
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.244111 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-spxrc\" (UniqueName: \"kubernetes.io/projected/d32fc3d7-6f1d-4f5c-8f70-39a417849b13-kube-api-access-spxrc\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-n6sh9\" (UID: \"d32fc3d7-6f1d-4f5c-8f70-39a417849b13\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-n6sh9"
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.244134 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qwwdf\" (UniqueName: \"kubernetes.io/projected/d2899908-ecd6-4e04-932d-f26909c0f547-kube-api-access-qwwdf\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4646jk\" (UID: \"d2899908-ecd6-4e04-932d-f26909c0f547\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4646jk"
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.265357 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-tc6cr"]
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.266926 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r45kz\" (UniqueName: \"kubernetes.io/projected/52133bea-24d3-440b-880d-67a3131c52db-kube-api-access-r45kz\") pod \"octavia-operator-controller-manager-998648c74-llgbj\" (UID: \"52133bea-24d3-440b-880d-67a3131c52db\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-llgbj"
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.267125 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2xmkt\" (UniqueName: \"kubernetes.io/projected/6200dbb3-7166-4fa0-925c-fe6155de2927-kube-api-access-2xmkt\") pod \"mariadb-operator-controller-manager-56bbcc9d85-5z5pm\" (UID: \"6200dbb3-7166-4fa0-925c-fe6155de2927\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-5z5pm"
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.268029 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-tc6cr"
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.270510 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-chqdq"
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.277027 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-tc6cr"]
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.277177 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-spxrc\" (UniqueName: \"kubernetes.io/projected/d32fc3d7-6f1d-4f5c-8f70-39a417849b13-kube-api-access-spxrc\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-n6sh9\" (UID: \"d32fc3d7-6f1d-4f5c-8f70-39a417849b13\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-n6sh9"
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.277544 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9k6pb\" (UniqueName: \"kubernetes.io/projected/af5e1f7a-185c-402f-80b7-fb6c66084d0f-kube-api-access-9k6pb\") pod \"nova-operator-controller-manager-697bc559fc-prd2m\" (UID: \"af5e1f7a-185c-402f-80b7-fb6c66084d0f\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-prd2m"
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.281307 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-fkjcs"
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.319843 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-tkgxb"
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.371616 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r6rlt\" (UniqueName: \"kubernetes.io/projected/d725fa2f-4d6e-47b2-aa2d-1757fcef4ad5-kube-api-access-r6rlt\") pod \"swift-operator-controller-manager-5f8c65bbfc-2dt8t\" (UID: \"d725fa2f-4d6e-47b2-aa2d-1757fcef4ad5\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-2dt8t"
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.371675 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d2899908-ecd6-4e04-932d-f26909c0f547-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4646jk\" (UID: \"d2899908-ecd6-4e04-932d-f26909c0f547\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4646jk"
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.371705 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r8gbn\" (UniqueName: \"kubernetes.io/projected/c31c0c1f-afa0-4ba8-a638-d27370864b63-kube-api-access-r8gbn\") pod \"placement-operator-controller-manager-78f8948974-fs6fj\" (UID: \"c31c0c1f-afa0-4ba8-a638-d27370864b63\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-fs6fj"
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.371742 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qwwdf\" (UniqueName: \"kubernetes.io/projected/d2899908-ecd6-4e04-932d-f26909c0f547-kube-api-access-qwwdf\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4646jk\" (UID: \"d2899908-ecd6-4e04-932d-f26909c0f547\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4646jk"
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.371778 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b29x5\" (UniqueName: \"kubernetes.io/projected/2e6a7f03-9a79-4c8f-8dfd-c1f4e6e3c2ad-kube-api-access-b29x5\") pod \"ovn-operator-controller-manager-b6456fdb6-2jxh6\" (UID: \"2e6a7f03-9a79-4c8f-8dfd-c1f4e6e3c2ad\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-2jxh6"
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.371812 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pllns\" (UniqueName: \"kubernetes.io/projected/43833100-b1fd-45fd-b772-9d0ee036c4ce-kube-api-access-pllns\") pod \"telemetry-operator-controller-manager-76cc84c6bb-tc6cr\" (UID: \"43833100-b1fd-45fd-b772-9d0ee036c4ce\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-tc6cr"
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.374272 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-5z5pm"
Dec 05 12:42:09 crc kubenswrapper[4784]: E1205 12:42:09.374979 4784 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 05 12:42:09 crc kubenswrapper[4784]: E1205 12:42:09.375022 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d2899908-ecd6-4e04-932d-f26909c0f547-cert podName:d2899908-ecd6-4e04-932d-f26909c0f547 nodeName:}" failed. No retries permitted until 2025-12-05 12:42:09.875005908 +0000 UTC m=+1009.295072723 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/d2899908-ecd6-4e04-932d-f26909c0f547-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4646jk" (UID: "d2899908-ecd6-4e04-932d-f26909c0f547") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.375295 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-bk9hd"
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.388234 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-n6sh9"
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-n6sh9" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.402253 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r8gbn\" (UniqueName: \"kubernetes.io/projected/c31c0c1f-afa0-4ba8-a638-d27370864b63-kube-api-access-r8gbn\") pod \"placement-operator-controller-manager-78f8948974-fs6fj\" (UID: \"c31c0c1f-afa0-4ba8-a638-d27370864b63\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-fs6fj" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.406651 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r6rlt\" (UniqueName: \"kubernetes.io/projected/d725fa2f-4d6e-47b2-aa2d-1757fcef4ad5-kube-api-access-r6rlt\") pod \"swift-operator-controller-manager-5f8c65bbfc-2dt8t\" (UID: \"d725fa2f-4d6e-47b2-aa2d-1757fcef4ad5\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-2dt8t" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.408009 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-prd2m" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.411742 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b29x5\" (UniqueName: \"kubernetes.io/projected/2e6a7f03-9a79-4c8f-8dfd-c1f4e6e3c2ad-kube-api-access-b29x5\") pod \"ovn-operator-controller-manager-b6456fdb6-2jxh6\" (UID: \"2e6a7f03-9a79-4c8f-8dfd-c1f4e6e3c2ad\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-2jxh6" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.415993 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qwwdf\" (UniqueName: \"kubernetes.io/projected/d2899908-ecd6-4e04-932d-f26909c0f547-kube-api-access-qwwdf\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4646jk\" (UID: \"d2899908-ecd6-4e04-932d-f26909c0f547\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4646jk" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.416220 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-j9wr9"] Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.417781 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-j9wr9" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.425850 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-llgbj" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.428375 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-264d9" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.448080 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-j9wr9"] Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.472814 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pllns\" (UniqueName: \"kubernetes.io/projected/43833100-b1fd-45fd-b772-9d0ee036c4ce-kube-api-access-pllns\") pod \"telemetry-operator-controller-manager-76cc84c6bb-tc6cr\" (UID: \"43833100-b1fd-45fd-b772-9d0ee036c4ce\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-tc6cr" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.472866 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-78qsj\" (UniqueName: \"kubernetes.io/projected/2b3caeee-8e0e-4a20-9cea-f9f668e2a76f-kube-api-access-78qsj\") pod \"test-operator-controller-manager-5854674fcc-j9wr9\" (UID: \"2b3caeee-8e0e-4a20-9cea-f9f668e2a76f\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-j9wr9" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.492005 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-2jxh6" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.510269 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-fs6fj" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.528459 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pllns\" (UniqueName: \"kubernetes.io/projected/43833100-b1fd-45fd-b772-9d0ee036c4ce-kube-api-access-pllns\") pod \"telemetry-operator-controller-manager-76cc84c6bb-tc6cr\" (UID: \"43833100-b1fd-45fd-b772-9d0ee036c4ce\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-tc6cr" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.546247 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-7b48476889-fdjfg"] Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.548714 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-7b48476889-fdjfg" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.552339 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-5889h" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.552488 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-7b48476889-fdjfg"] Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.569406 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-2dt8t" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.573793 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-78qsj\" (UniqueName: \"kubernetes.io/projected/2b3caeee-8e0e-4a20-9cea-f9f668e2a76f-kube-api-access-78qsj\") pod \"test-operator-controller-manager-5854674fcc-j9wr9\" (UID: \"2b3caeee-8e0e-4a20-9cea-f9f668e2a76f\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-j9wr9" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.574173 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6c20830c-fef0-4691-9505-5d0c3726ca11-cert\") pod \"infra-operator-controller-manager-57548d458d-6n77l\" (UID: \"6c20830c-fef0-4691-9505-5d0c3726ca11\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-6n77l" Dec 05 12:42:09 crc kubenswrapper[4784]: E1205 12:42:09.574419 4784 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 12:42:09 crc kubenswrapper[4784]: E1205 12:42:09.574481 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6c20830c-fef0-4691-9505-5d0c3726ca11-cert podName:6c20830c-fef0-4691-9505-5d0c3726ca11 nodeName:}" failed. No retries permitted until 2025-12-05 12:42:10.574461993 +0000 UTC m=+1009.994528808 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/6c20830c-fef0-4691-9505-5d0c3726ca11-cert") pod "infra-operator-controller-manager-57548d458d-6n77l" (UID: "6c20830c-fef0-4691-9505-5d0c3726ca11") : secret "infra-operator-webhook-server-cert" not found Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.588293 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-6f87cfd46c-qltwb"] Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.589291 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-6f87cfd46c-qltwb" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.592441 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.596054 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.596899 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-xrc4m" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.599436 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-6f87cfd46c-qltwb"] Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.601567 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-tc6cr" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.609206 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-78qsj\" (UniqueName: \"kubernetes.io/projected/2b3caeee-8e0e-4a20-9cea-f9f668e2a76f-kube-api-access-78qsj\") pod \"test-operator-controller-manager-5854674fcc-j9wr9\" (UID: \"2b3caeee-8e0e-4a20-9cea-f9f668e2a76f\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-j9wr9" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.630656 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-mnptg"] Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.633243 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-mnptg" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.636490 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-chbgj" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.638154 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-mnptg"] Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.646345 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-8ct4c"] Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.676996 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0a262894-4e21-4fe3-b216-b135bfb56d5b-metrics-certs\") pod \"openstack-operator-controller-manager-6f87cfd46c-qltwb\" (UID: \"0a262894-4e21-4fe3-b216-b135bfb56d5b\") " pod="openstack-operators/openstack-operator-controller-manager-6f87cfd46c-qltwb" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.677041 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/0a262894-4e21-4fe3-b216-b135bfb56d5b-webhook-certs\") pod \"openstack-operator-controller-manager-6f87cfd46c-qltwb\" (UID: \"0a262894-4e21-4fe3-b216-b135bfb56d5b\") " pod="openstack-operators/openstack-operator-controller-manager-6f87cfd46c-qltwb" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.677074 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dpvxv\" (UniqueName: \"kubernetes.io/projected/ce7167d9-e7f3-428e-bbcb-6879014ec908-kube-api-access-dpvxv\") pod \"rabbitmq-cluster-operator-manager-668c99d594-mnptg\" (UID: \"ce7167d9-e7f3-428e-bbcb-6879014ec908\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-mnptg" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.677121 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h4jgf\" (UniqueName: \"kubernetes.io/projected/19094ce3-8926-4668-87b9-db8aac572e80-kube-api-access-h4jgf\") pod \"watcher-operator-controller-manager-7b48476889-fdjfg\" (UID: \"19094ce3-8926-4668-87b9-db8aac572e80\") " pod="openstack-operators/watcher-operator-controller-manager-7b48476889-fdjfg" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.677154 4784 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqdhq\" (UniqueName: \"kubernetes.io/projected/0a262894-4e21-4fe3-b216-b135bfb56d5b-kube-api-access-fqdhq\") pod \"openstack-operator-controller-manager-6f87cfd46c-qltwb\" (UID: \"0a262894-4e21-4fe3-b216-b135bfb56d5b\") " pod="openstack-operators/openstack-operator-controller-manager-6f87cfd46c-qltwb" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.688335 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-grj7k"] Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.734327 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-g4cvf"] Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.736942 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-lw4zg"] Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.767544 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-j9wr9" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.778937 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0a262894-4e21-4fe3-b216-b135bfb56d5b-metrics-certs\") pod \"openstack-operator-controller-manager-6f87cfd46c-qltwb\" (UID: \"0a262894-4e21-4fe3-b216-b135bfb56d5b\") " pod="openstack-operators/openstack-operator-controller-manager-6f87cfd46c-qltwb" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.778978 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/0a262894-4e21-4fe3-b216-b135bfb56d5b-webhook-certs\") pod \"openstack-operator-controller-manager-6f87cfd46c-qltwb\" (UID: \"0a262894-4e21-4fe3-b216-b135bfb56d5b\") " pod="openstack-operators/openstack-operator-controller-manager-6f87cfd46c-qltwb" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.779006 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dpvxv\" (UniqueName: \"kubernetes.io/projected/ce7167d9-e7f3-428e-bbcb-6879014ec908-kube-api-access-dpvxv\") pod \"rabbitmq-cluster-operator-manager-668c99d594-mnptg\" (UID: \"ce7167d9-e7f3-428e-bbcb-6879014ec908\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-mnptg" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.779049 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h4jgf\" (UniqueName: \"kubernetes.io/projected/19094ce3-8926-4668-87b9-db8aac572e80-kube-api-access-h4jgf\") pod \"watcher-operator-controller-manager-7b48476889-fdjfg\" (UID: \"19094ce3-8926-4668-87b9-db8aac572e80\") " pod="openstack-operators/watcher-operator-controller-manager-7b48476889-fdjfg" Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.779080 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqdhq\" (UniqueName: \"kubernetes.io/projected/0a262894-4e21-4fe3-b216-b135bfb56d5b-kube-api-access-fqdhq\") pod \"openstack-operator-controller-manager-6f87cfd46c-qltwb\" (UID: \"0a262894-4e21-4fe3-b216-b135bfb56d5b\") " pod="openstack-operators/openstack-operator-controller-manager-6f87cfd46c-qltwb" Dec 05 12:42:09 crc kubenswrapper[4784]: E1205 12:42:09.779707 4784 secret.go:188] 
Dec 05 12:42:09 crc kubenswrapper[4784]: E1205 12:42:09.779803 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0a262894-4e21-4fe3-b216-b135bfb56d5b-webhook-certs podName:0a262894-4e21-4fe3-b216-b135bfb56d5b nodeName:}" failed. No retries permitted until 2025-12-05 12:42:10.279776304 +0000 UTC m=+1009.699843209 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/0a262894-4e21-4fe3-b216-b135bfb56d5b-webhook-certs") pod "openstack-operator-controller-manager-6f87cfd46c-qltwb" (UID: "0a262894-4e21-4fe3-b216-b135bfb56d5b") : secret "webhook-server-cert" not found
Dec 05 12:42:09 crc kubenswrapper[4784]: E1205 12:42:09.779852 4784 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 05 12:42:09 crc kubenswrapper[4784]: E1205 12:42:09.779898 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0a262894-4e21-4fe3-b216-b135bfb56d5b-metrics-certs podName:0a262894-4e21-4fe3-b216-b135bfb56d5b nodeName:}" failed. No retries permitted until 2025-12-05 12:42:10.279883777 +0000 UTC m=+1009.699950592 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0a262894-4e21-4fe3-b216-b135bfb56d5b-metrics-certs") pod "openstack-operator-controller-manager-6f87cfd46c-qltwb" (UID: "0a262894-4e21-4fe3-b216-b135bfb56d5b") : secret "metrics-server-cert" not found
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.851205 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h4jgf\" (UniqueName: \"kubernetes.io/projected/19094ce3-8926-4668-87b9-db8aac572e80-kube-api-access-h4jgf\") pod \"watcher-operator-controller-manager-7b48476889-fdjfg\" (UID: \"19094ce3-8926-4668-87b9-db8aac572e80\") " pod="openstack-operators/watcher-operator-controller-manager-7b48476889-fdjfg"
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.855486 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dpvxv\" (UniqueName: \"kubernetes.io/projected/ce7167d9-e7f3-428e-bbcb-6879014ec908-kube-api-access-dpvxv\") pod \"rabbitmq-cluster-operator-manager-668c99d594-mnptg\" (UID: \"ce7167d9-e7f3-428e-bbcb-6879014ec908\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-mnptg"
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.863306 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fqdhq\" (UniqueName: \"kubernetes.io/projected/0a262894-4e21-4fe3-b216-b135bfb56d5b-kube-api-access-fqdhq\") pod \"openstack-operator-controller-manager-6f87cfd46c-qltwb\" (UID: \"0a262894-4e21-4fe3-b216-b135bfb56d5b\") " pod="openstack-operators/openstack-operator-controller-manager-6f87cfd46c-qltwb"
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.880684 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d2899908-ecd6-4e04-932d-f26909c0f547-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4646jk\" (UID: \"d2899908-ecd6-4e04-932d-f26909c0f547\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4646jk"
Dec 05 12:42:09 crc kubenswrapper[4784]: E1205 12:42:09.880888 4784 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 05 12:42:09 crc kubenswrapper[4784]: E1205 12:42:09.880953 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d2899908-ecd6-4e04-932d-f26909c0f547-cert podName:d2899908-ecd6-4e04-932d-f26909c0f547 nodeName:}" failed. No retries permitted until 2025-12-05 12:42:10.880933567 +0000 UTC m=+1010.301000372 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/d2899908-ecd6-4e04-932d-f26909c0f547-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4646jk" (UID: "d2899908-ecd6-4e04-932d-f26909c0f547") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.884009 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-7b48476889-fdjfg"
Dec 05 12:42:09 crc kubenswrapper[4784]: I1205 12:42:09.989204 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-mnptg"
Dec 05 12:42:10 crc kubenswrapper[4784]: I1205 12:42:10.187037 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-8ct4c" event={"ID":"5434c275-5acc-4ffe-94ff-1cd9440300b0","Type":"ContainerStarted","Data":"9be9c480c26ae04a58e4dca560e8c8916f72983df453eba733c606ab28f4509e"}
Dec 05 12:42:10 crc kubenswrapper[4784]: I1205 12:42:10.189386 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-lw4zg" event={"ID":"34392862-6b0a-4e19-8702-d685378817b1","Type":"ContainerStarted","Data":"8c26407937be77b2fd4f8cd6e06f790b5a98ef06f91d1aed2b210627a739ca89"}
Dec 05 12:42:10 crc kubenswrapper[4784]: I1205 12:42:10.194765 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-g4cvf" event={"ID":"cae1438b-c8fd-4660-8843-f41bca4b1e15","Type":"ContainerStarted","Data":"45dcc74f4f214518922db1419c508b1eeb7c75fabd621d576e91b150c1f1fc13"}
Dec 05 12:42:10 crc kubenswrapper[4784]: I1205 12:42:10.196023 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-grj7k" event={"ID":"17a1e99d-2e27-47df-93be-afbb5224152b","Type":"ContainerStarted","Data":"6f234ab4274a4cd65d95640e1e38e3354fc8ae66d8bc01a0277822ecbca23515"}
Dec 05 12:42:10 crc kubenswrapper[4784]: I1205 12:42:10.209471 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-bstnz"]
Dec 05 12:42:10 crc kubenswrapper[4784]: I1205 12:42:10.224231 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-fhllr"]
Dec 05 12:42:10 crc kubenswrapper[4784]: I1205 12:42:10.230810 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-fkjcs"]
Dec 05 12:42:10 crc kubenswrapper[4784]: I1205 12:42:10.240054 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-tkgxb"]
Dec 05 12:42:10 crc kubenswrapper[4784]: I1205 12:42:10.290219 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0a262894-4e21-4fe3-b216-b135bfb56d5b-metrics-certs\") pod \"openstack-operator-controller-manager-6f87cfd46c-qltwb\" (UID: \"0a262894-4e21-4fe3-b216-b135bfb56d5b\") " pod="openstack-operators/openstack-operator-controller-manager-6f87cfd46c-qltwb"
Dec 05 12:42:10 crc kubenswrapper[4784]: I1205 12:42:10.290609 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/0a262894-4e21-4fe3-b216-b135bfb56d5b-webhook-certs\") pod \"openstack-operator-controller-manager-6f87cfd46c-qltwb\" (UID: \"0a262894-4e21-4fe3-b216-b135bfb56d5b\") " pod="openstack-operators/openstack-operator-controller-manager-6f87cfd46c-qltwb"
Dec 05 12:42:10 crc kubenswrapper[4784]: E1205 12:42:10.290760 4784 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 05 12:42:10 crc kubenswrapper[4784]: E1205 12:42:10.290820 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0a262894-4e21-4fe3-b216-b135bfb56d5b-webhook-certs podName:0a262894-4e21-4fe3-b216-b135bfb56d5b nodeName:}" failed. No retries permitted until 2025-12-05 12:42:11.290802242 +0000 UTC m=+1010.710869057 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/0a262894-4e21-4fe3-b216-b135bfb56d5b-webhook-certs") pod "openstack-operator-controller-manager-6f87cfd46c-qltwb" (UID: "0a262894-4e21-4fe3-b216-b135bfb56d5b") : secret "webhook-server-cert" not found
Dec 05 12:42:10 crc kubenswrapper[4784]: E1205 12:42:10.290829 4784 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 05 12:42:10 crc kubenswrapper[4784]: E1205 12:42:10.290914 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0a262894-4e21-4fe3-b216-b135bfb56d5b-metrics-certs podName:0a262894-4e21-4fe3-b216-b135bfb56d5b nodeName:}" failed. No retries permitted until 2025-12-05 12:42:11.290893035 +0000 UTC m=+1010.710959910 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0a262894-4e21-4fe3-b216-b135bfb56d5b-metrics-certs") pod "openstack-operator-controller-manager-6f87cfd46c-qltwb" (UID: "0a262894-4e21-4fe3-b216-b135bfb56d5b") : secret "metrics-server-cert" not found
Dec 05 12:42:10 crc kubenswrapper[4784]: I1205 12:42:10.299288 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-5z5pm"]
Dec 05 12:42:10 crc kubenswrapper[4784]: I1205 12:42:10.309972 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-n6sh9"]
Dec 05 12:42:10 crc kubenswrapper[4784]: W1205 12:42:10.326085 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd32fc3d7_6f1d_4f5c_8f70_39a417849b13.slice/crio-dc40053de264a60cbd07894132a7e141f630669e282bccc065633f2fecf9a99a WatchSource:0}: Error finding container dc40053de264a60cbd07894132a7e141f630669e282bccc065633f2fecf9a99a: Status 404 returned error can't find the container with id dc40053de264a60cbd07894132a7e141f630669e282bccc065633f2fecf9a99a
Dec 05 12:42:10 crc kubenswrapper[4784]: I1205 12:42:10.419296 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-llgbj"]
Dec 05 12:42:10 crc kubenswrapper[4784]: W1205 12:42:10.422798 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaf5e1f7a_185c_402f_80b7_fb6c66084d0f.slice/crio-22e1d05cec761552c87a15ae799594b4d96eb4ffe4708e5fc19d6b485c8fdc3a WatchSource:0}: Error finding container 22e1d05cec761552c87a15ae799594b4d96eb4ffe4708e5fc19d6b485c8fdc3a: Status 404 returned error can't find the container with id 22e1d05cec761552c87a15ae799594b4d96eb4ffe4708e5fc19d6b485c8fdc3a
Dec 05 12:42:10 crc kubenswrapper[4784]: I1205 12:42:10.424574 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-prd2m"]
Dec 05 12:42:10 crc kubenswrapper[4784]: I1205 12:42:10.432812 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-bk9hd"]
Dec 05 12:42:10 crc kubenswrapper[4784]: W1205 12:42:10.433763 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd14c7f23_4235_4257_a178_6b90aa4cf3b4.slice/crio-ee82f5d0653048fa6df72b059559a50c2a080097ff275f5b21f90f21bf1310fc WatchSource:0}: Error finding container ee82f5d0653048fa6df72b059559a50c2a080097ff275f5b21f90f21bf1310fc: Status 404 returned error can't find the container with id ee82f5d0653048fa6df72b059559a50c2a080097ff275f5b21f90f21bf1310fc
Dec 05 12:42:10 crc kubenswrapper[4784]: I1205 12:42:10.440634 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-fs6fj"]
Dec 05 12:42:10 crc kubenswrapper[4784]: W1205 12:42:10.443346 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc31c0c1f_afa0_4ba8_a638_d27370864b63.slice/crio-8bac446c312d113d81509d10cc5d7ef066ad5be5d43a32e7a759f7ce6039f96c WatchSource:0}: Error finding container 8bac446c312d113d81509d10cc5d7ef066ad5be5d43a32e7a759f7ce6039f96c: Status 404 returned error can't find the
container with id 8bac446c312d113d81509d10cc5d7ef066ad5be5d43a32e7a759f7ce6039f96c Dec 05 12:42:10 crc kubenswrapper[4784]: I1205 12:42:10.533040 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-2jxh6"] Dec 05 12:42:10 crc kubenswrapper[4784]: I1205 12:42:10.547013 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-j9wr9"] Dec 05 12:42:10 crc kubenswrapper[4784]: I1205 12:42:10.552798 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-2dt8t"] Dec 05 12:42:10 crc kubenswrapper[4784]: W1205 12:42:10.553023 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2b3caeee_8e0e_4a20_9cea_f9f668e2a76f.slice/crio-ccd64fbfdeece1fafd41ee369aaeb8827044eb174fad76409bb6823a5d77ee13 WatchSource:0}: Error finding container ccd64fbfdeece1fafd41ee369aaeb8827044eb174fad76409bb6823a5d77ee13: Status 404 returned error can't find the container with id ccd64fbfdeece1fafd41ee369aaeb8827044eb174fad76409bb6823a5d77ee13 Dec 05 12:42:10 crc kubenswrapper[4784]: E1205 12:42:10.555147 4784 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-78qsj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-j9wr9_openstack-operators(2b3caeee-8e0e-4a20-9cea-f9f668e2a76f): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 12:42:10 crc kubenswrapper[4784]: W1205 12:42:10.555323 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2e6a7f03_9a79_4c8f_8dfd_c1f4e6e3c2ad.slice/crio-f18fbd19b08770a77e623129d2716508d00a5c18939313264680d7d7b392e7f8 WatchSource:0}: Error finding container f18fbd19b08770a77e623129d2716508d00a5c18939313264680d7d7b392e7f8: Status 404 returned error can't find the container with id f18fbd19b08770a77e623129d2716508d00a5c18939313264680d7d7b392e7f8 Dec 05 12:42:10 crc kubenswrapper[4784]: E1205 12:42:10.557869 4784 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-78qsj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-j9wr9_openstack-operators(2b3caeee-8e0e-4a20-9cea-f9f668e2a76f): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 12:42:10 crc kubenswrapper[4784]: I1205 12:42:10.558139 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-tc6cr"] Dec 05 12:42:10 crc kubenswrapper[4784]: E1205 12:42:10.558784 4784 kuberuntime_manager.go:1274] "Unhandled Error" 
err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-b29x5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-b6456fdb6-2jxh6_openstack-operators(2e6a7f03-9a79-4c8f-8dfd-c1f4e6e3c2ad): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 12:42:10 crc kubenswrapper[4784]: E1205 12:42:10.559091 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-j9wr9" podUID="2b3caeee-8e0e-4a20-9cea-f9f668e2a76f" Dec 05 12:42:10 crc kubenswrapper[4784]: E1205 12:42:10.559515 4784 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-r6rlt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-5f8c65bbfc-2dt8t_openstack-operators(d725fa2f-4d6e-47b2-aa2d-1757fcef4ad5): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 12:42:10 crc kubenswrapper[4784]: E1205 12:42:10.561251 4784 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-r6rlt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-5f8c65bbfc-2dt8t_openstack-operators(d725fa2f-4d6e-47b2-aa2d-1757fcef4ad5): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 12:42:10 crc kubenswrapper[4784]: E1205 12:42:10.561641 4784 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-b29x5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-b6456fdb6-2jxh6_openstack-operators(2e6a7f03-9a79-4c8f-8dfd-c1f4e6e3c2ad): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 12:42:10 crc kubenswrapper[4784]: E1205 12:42:10.562856 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-2jxh6" podUID="2e6a7f03-9a79-4c8f-8dfd-c1f4e6e3c2ad" Dec 05 12:42:10 crc kubenswrapper[4784]: E1205 12:42:10.562889 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" 
pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-2dt8t" podUID="d725fa2f-4d6e-47b2-aa2d-1757fcef4ad5" Dec 05 12:42:10 crc kubenswrapper[4784]: W1205 12:42:10.571742 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod43833100_b1fd_45fd_b772_9d0ee036c4ce.slice/crio-85fae20bc8811dd427d14bfe57b695287c5f6b10ab0127ebcd77f743c6029b37 WatchSource:0}: Error finding container 85fae20bc8811dd427d14bfe57b695287c5f6b10ab0127ebcd77f743c6029b37: Status 404 returned error can't find the container with id 85fae20bc8811dd427d14bfe57b695287c5f6b10ab0127ebcd77f743c6029b37 Dec 05 12:42:10 crc kubenswrapper[4784]: E1205 12:42:10.574765 4784 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-pllns,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-tc6cr_openstack-operators(43833100-b1fd-45fd-b772-9d0ee036c4ce): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 12:42:10 crc kubenswrapper[4784]: E1205 12:42:10.576955 4784 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true 
--v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-pllns,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-tc6cr_openstack-operators(43833100-b1fd-45fd-b772-9d0ee036c4ce): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 12:42:10 crc kubenswrapper[4784]: E1205 12:42:10.578131 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-tc6cr" podUID="43833100-b1fd-45fd-b772-9d0ee036c4ce" Dec 05 12:42:10 crc kubenswrapper[4784]: I1205 12:42:10.598095 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6c20830c-fef0-4691-9505-5d0c3726ca11-cert\") pod \"infra-operator-controller-manager-57548d458d-6n77l\" (UID: \"6c20830c-fef0-4691-9505-5d0c3726ca11\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-6n77l" Dec 05 12:42:10 crc kubenswrapper[4784]: E1205 12:42:10.598340 4784 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 12:42:10 crc kubenswrapper[4784]: E1205 12:42:10.598438 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6c20830c-fef0-4691-9505-5d0c3726ca11-cert podName:6c20830c-fef0-4691-9505-5d0c3726ca11 nodeName:}" failed. No retries permitted until 2025-12-05 12:42:12.59839573 +0000 UTC m=+1012.018462545 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/6c20830c-fef0-4691-9505-5d0c3726ca11-cert") pod "infra-operator-controller-manager-57548d458d-6n77l" (UID: "6c20830c-fef0-4691-9505-5d0c3726ca11") : secret "infra-operator-webhook-server-cert" not found Dec 05 12:42:10 crc kubenswrapper[4784]: I1205 12:42:10.636855 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-mnptg"] Dec 05 12:42:10 crc kubenswrapper[4784]: W1205 12:42:10.640903 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podce7167d9_e7f3_428e_bbcb_6879014ec908.slice/crio-fe51f570cf8a237401654b971444219b8d9fea33ffe6e0f706c90d263a7f6506 WatchSource:0}: Error finding container fe51f570cf8a237401654b971444219b8d9fea33ffe6e0f706c90d263a7f6506: Status 404 returned error can't find the container with id fe51f570cf8a237401654b971444219b8d9fea33ffe6e0f706c90d263a7f6506 Dec 05 12:42:10 crc kubenswrapper[4784]: I1205 12:42:10.642401 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-7b48476889-fdjfg"] Dec 05 12:42:10 crc kubenswrapper[4784]: W1205 12:42:10.646226 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod19094ce3_8926_4668_87b9_db8aac572e80.slice/crio-bb574093b7ba3907a460831a4a35986388c2aa8acd44eac8042af602d8248d93 WatchSource:0}: Error finding container bb574093b7ba3907a460831a4a35986388c2aa8acd44eac8042af602d8248d93: Status 404 returned error can't find the container with id bb574093b7ba3907a460831a4a35986388c2aa8acd44eac8042af602d8248d93 Dec 05 12:42:10 crc kubenswrapper[4784]: E1205 12:42:10.649289 4784 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:38.102.83.151:5001/openstack-k8s-operators/watcher-operator:9e2b1e4b7b3896a4c4f152962f74457a6de43346,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-h4jgf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-7b48476889-fdjfg_openstack-operators(19094ce3-8926-4668-87b9-db8aac572e80): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 12:42:10 crc kubenswrapper[4784]: E1205 12:42:10.651667 4784 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-h4jgf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-7b48476889-fdjfg_openstack-operators(19094ce3-8926-4668-87b9-db8aac572e80): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 12:42:10 crc kubenswrapper[4784]: E1205 12:42:10.653574 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/watcher-operator-controller-manager-7b48476889-fdjfg" podUID="19094ce3-8926-4668-87b9-db8aac572e80" Dec 05 12:42:10 crc kubenswrapper[4784]: I1205 12:42:10.902628 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d2899908-ecd6-4e04-932d-f26909c0f547-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4646jk\" (UID: \"d2899908-ecd6-4e04-932d-f26909c0f547\") " 
pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4646jk" Dec 05 12:42:10 crc kubenswrapper[4784]: E1205 12:42:10.902768 4784 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 12:42:10 crc kubenswrapper[4784]: E1205 12:42:10.902889 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d2899908-ecd6-4e04-932d-f26909c0f547-cert podName:d2899908-ecd6-4e04-932d-f26909c0f547 nodeName:}" failed. No retries permitted until 2025-12-05 12:42:12.902873871 +0000 UTC m=+1012.322940686 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/d2899908-ecd6-4e04-932d-f26909c0f547-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4646jk" (UID: "d2899908-ecd6-4e04-932d-f26909c0f547") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 12:42:11 crc kubenswrapper[4784]: I1205 12:42:11.211305 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-bk9hd" event={"ID":"d14c7f23-4235-4257-a178-6b90aa4cf3b4","Type":"ContainerStarted","Data":"ee82f5d0653048fa6df72b059559a50c2a080097ff275f5b21f90f21bf1310fc"} Dec 05 12:42:11 crc kubenswrapper[4784]: I1205 12:42:11.213478 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-llgbj" event={"ID":"52133bea-24d3-440b-880d-67a3131c52db","Type":"ContainerStarted","Data":"57ea5cb463e0873b67e8f6a28b11a7386750cd6f2665e109ee91b15bd9c538c1"} Dec 05 12:42:11 crc kubenswrapper[4784]: I1205 12:42:11.216862 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-tc6cr" event={"ID":"43833100-b1fd-45fd-b772-9d0ee036c4ce","Type":"ContainerStarted","Data":"85fae20bc8811dd427d14bfe57b695287c5f6b10ab0127ebcd77f743c6029b37"} Dec 05 12:42:11 crc kubenswrapper[4784]: I1205 12:42:11.218866 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-2dt8t" event={"ID":"d725fa2f-4d6e-47b2-aa2d-1757fcef4ad5","Type":"ContainerStarted","Data":"7226f42e06a577a89c9704c6bdd7e4aeb39fd4f12ee9acb5f998867826a56cc2"} Dec 05 12:42:11 crc kubenswrapper[4784]: E1205 12:42:11.237092 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-tc6cr" podUID="43833100-b1fd-45fd-b772-9d0ee036c4ce" Dec 05 12:42:11 crc kubenswrapper[4784]: I1205 12:42:11.237784 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-fkjcs" event={"ID":"70f6568a-d588-4d71-8e38-def379ac95cf","Type":"ContainerStarted","Data":"b9b836b99968e6c2d880f0a7cb9cd78702e1136160b1f524acea50ba25798339"} Dec 05 12:42:11 crc kubenswrapper[4784]: E1205 12:42:11.238386 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to 
\"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-2dt8t" podUID="d725fa2f-4d6e-47b2-aa2d-1757fcef4ad5" Dec 05 12:42:11 crc kubenswrapper[4784]: I1205 12:42:11.239007 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-mnptg" event={"ID":"ce7167d9-e7f3-428e-bbcb-6879014ec908","Type":"ContainerStarted","Data":"fe51f570cf8a237401654b971444219b8d9fea33ffe6e0f706c90d263a7f6506"} Dec 05 12:42:11 crc kubenswrapper[4784]: I1205 12:42:11.247421 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-n6sh9" event={"ID":"d32fc3d7-6f1d-4f5c-8f70-39a417849b13","Type":"ContainerStarted","Data":"dc40053de264a60cbd07894132a7e141f630669e282bccc065633f2fecf9a99a"} Dec 05 12:42:11 crc kubenswrapper[4784]: I1205 12:42:11.265298 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-prd2m" event={"ID":"af5e1f7a-185c-402f-80b7-fb6c66084d0f","Type":"ContainerStarted","Data":"22e1d05cec761552c87a15ae799594b4d96eb4ffe4708e5fc19d6b485c8fdc3a"} Dec 05 12:42:11 crc kubenswrapper[4784]: I1205 12:42:11.276139 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-7b48476889-fdjfg" event={"ID":"19094ce3-8926-4668-87b9-db8aac572e80","Type":"ContainerStarted","Data":"bb574093b7ba3907a460831a4a35986388c2aa8acd44eac8042af602d8248d93"} Dec 05 12:42:11 crc kubenswrapper[4784]: I1205 12:42:11.278045 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-fhllr" event={"ID":"bd95d9d9-a3b2-4f91-94f1-a60041b5b640","Type":"ContainerStarted","Data":"c58eadb93748ffabe81bb5e87b2a70d396f3ac7a6f57efaeb73657cee2bf5f6b"} Dec 05 12:42:11 crc kubenswrapper[4784]: E1205 12:42:11.282454 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.151:5001/openstack-k8s-operators/watcher-operator:9e2b1e4b7b3896a4c4f152962f74457a6de43346\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-7b48476889-fdjfg" podUID="19094ce3-8926-4668-87b9-db8aac572e80" Dec 05 12:42:11 crc kubenswrapper[4784]: I1205 12:42:11.294398 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-bstnz" event={"ID":"253f2712-fbf0-476b-8ba3-387f7811e4f7","Type":"ContainerStarted","Data":"7ed0fb7366cf0c4b2980247abce13324916d0a98042545acee21dcdf6ab91f12"} Dec 05 12:42:11 crc kubenswrapper[4784]: I1205 12:42:11.314574 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-2jxh6" 
event={"ID":"2e6a7f03-9a79-4c8f-8dfd-c1f4e6e3c2ad","Type":"ContainerStarted","Data":"f18fbd19b08770a77e623129d2716508d00a5c18939313264680d7d7b392e7f8"} Dec 05 12:42:11 crc kubenswrapper[4784]: I1205 12:42:11.315892 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0a262894-4e21-4fe3-b216-b135bfb56d5b-metrics-certs\") pod \"openstack-operator-controller-manager-6f87cfd46c-qltwb\" (UID: \"0a262894-4e21-4fe3-b216-b135bfb56d5b\") " pod="openstack-operators/openstack-operator-controller-manager-6f87cfd46c-qltwb" Dec 05 12:42:11 crc kubenswrapper[4784]: I1205 12:42:11.315937 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/0a262894-4e21-4fe3-b216-b135bfb56d5b-webhook-certs\") pod \"openstack-operator-controller-manager-6f87cfd46c-qltwb\" (UID: \"0a262894-4e21-4fe3-b216-b135bfb56d5b\") " pod="openstack-operators/openstack-operator-controller-manager-6f87cfd46c-qltwb" Dec 05 12:42:11 crc kubenswrapper[4784]: E1205 12:42:11.316091 4784 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 12:42:11 crc kubenswrapper[4784]: E1205 12:42:11.316137 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0a262894-4e21-4fe3-b216-b135bfb56d5b-webhook-certs podName:0a262894-4e21-4fe3-b216-b135bfb56d5b nodeName:}" failed. No retries permitted until 2025-12-05 12:42:13.316119572 +0000 UTC m=+1012.736186387 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/0a262894-4e21-4fe3-b216-b135bfb56d5b-webhook-certs") pod "openstack-operator-controller-manager-6f87cfd46c-qltwb" (UID: "0a262894-4e21-4fe3-b216-b135bfb56d5b") : secret "webhook-server-cert" not found Dec 05 12:42:11 crc kubenswrapper[4784]: E1205 12:42:11.316983 4784 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 12:42:11 crc kubenswrapper[4784]: E1205 12:42:11.317015 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0a262894-4e21-4fe3-b216-b135bfb56d5b-metrics-certs podName:0a262894-4e21-4fe3-b216-b135bfb56d5b nodeName:}" failed. No retries permitted until 2025-12-05 12:42:13.317004791 +0000 UTC m=+1012.737071606 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0a262894-4e21-4fe3-b216-b135bfb56d5b-metrics-certs") pod "openstack-operator-controller-manager-6f87cfd46c-qltwb" (UID: "0a262894-4e21-4fe3-b216-b135bfb56d5b") : secret "metrics-server-cert" not found Dec 05 12:42:11 crc kubenswrapper[4784]: E1205 12:42:11.323758 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-2jxh6" podUID="2e6a7f03-9a79-4c8f-8dfd-c1f4e6e3c2ad" Dec 05 12:42:11 crc kubenswrapper[4784]: I1205 12:42:11.326885 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-tkgxb" event={"ID":"bfa95d2c-7e0c-4a2e-8942-03eb8dfddbd5","Type":"ContainerStarted","Data":"01f0633bb0eb33ee80b0893f7016dedebc75feb5ae403523a693e504b50709b2"} Dec 05 12:42:11 crc kubenswrapper[4784]: I1205 12:42:11.353105 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-5z5pm" event={"ID":"6200dbb3-7166-4fa0-925c-fe6155de2927","Type":"ContainerStarted","Data":"ad0682ab470a5e11fe4ff3b985f2ac31dfe094101c71075de293c4148e6540ab"} Dec 05 12:42:11 crc kubenswrapper[4784]: I1205 12:42:11.427174 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-fs6fj" event={"ID":"c31c0c1f-afa0-4ba8-a638-d27370864b63","Type":"ContainerStarted","Data":"8bac446c312d113d81509d10cc5d7ef066ad5be5d43a32e7a759f7ce6039f96c"} Dec 05 12:42:11 crc kubenswrapper[4784]: I1205 12:42:11.470070 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-j9wr9" event={"ID":"2b3caeee-8e0e-4a20-9cea-f9f668e2a76f","Type":"ContainerStarted","Data":"ccd64fbfdeece1fafd41ee369aaeb8827044eb174fad76409bb6823a5d77ee13"} Dec 05 12:42:11 crc kubenswrapper[4784]: E1205 12:42:11.480450 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-j9wr9" podUID="2b3caeee-8e0e-4a20-9cea-f9f668e2a76f" Dec 05 12:42:12 crc kubenswrapper[4784]: E1205 12:42:12.495939 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-2jxh6" 
podUID="2e6a7f03-9a79-4c8f-8dfd-c1f4e6e3c2ad" Dec 05 12:42:12 crc kubenswrapper[4784]: E1205 12:42:12.498163 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-j9wr9" podUID="2b3caeee-8e0e-4a20-9cea-f9f668e2a76f" Dec 05 12:42:12 crc kubenswrapper[4784]: E1205 12:42:12.499172 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-2dt8t" podUID="d725fa2f-4d6e-47b2-aa2d-1757fcef4ad5" Dec 05 12:42:12 crc kubenswrapper[4784]: E1205 12:42:12.499250 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-tc6cr" podUID="43833100-b1fd-45fd-b772-9d0ee036c4ce" Dec 05 12:42:12 crc kubenswrapper[4784]: E1205 12:42:12.502001 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.151:5001/openstack-k8s-operators/watcher-operator:9e2b1e4b7b3896a4c4f152962f74457a6de43346\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-7b48476889-fdjfg" podUID="19094ce3-8926-4668-87b9-db8aac572e80" Dec 05 12:42:12 crc kubenswrapper[4784]: I1205 12:42:12.646000 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6c20830c-fef0-4691-9505-5d0c3726ca11-cert\") pod \"infra-operator-controller-manager-57548d458d-6n77l\" (UID: \"6c20830c-fef0-4691-9505-5d0c3726ca11\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-6n77l" Dec 05 12:42:12 crc kubenswrapper[4784]: E1205 12:42:12.646462 4784 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 12:42:12 crc kubenswrapper[4784]: E1205 12:42:12.646523 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6c20830c-fef0-4691-9505-5d0c3726ca11-cert podName:6c20830c-fef0-4691-9505-5d0c3726ca11 nodeName:}" failed. 
No retries permitted until 2025-12-05 12:42:16.646505621 +0000 UTC m=+1016.066572446 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/6c20830c-fef0-4691-9505-5d0c3726ca11-cert") pod "infra-operator-controller-manager-57548d458d-6n77l" (UID: "6c20830c-fef0-4691-9505-5d0c3726ca11") : secret "infra-operator-webhook-server-cert" not found
Dec 05 12:42:12 crc kubenswrapper[4784]: I1205 12:42:12.950143 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d2899908-ecd6-4e04-932d-f26909c0f547-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4646jk\" (UID: \"d2899908-ecd6-4e04-932d-f26909c0f547\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4646jk"
Dec 05 12:42:12 crc kubenswrapper[4784]: E1205 12:42:12.950359 4784 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 05 12:42:12 crc kubenswrapper[4784]: E1205 12:42:12.950403 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d2899908-ecd6-4e04-932d-f26909c0f547-cert podName:d2899908-ecd6-4e04-932d-f26909c0f547 nodeName:}" failed. No retries permitted until 2025-12-05 12:42:16.950389982 +0000 UTC m=+1016.370456797 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/d2899908-ecd6-4e04-932d-f26909c0f547-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4646jk" (UID: "d2899908-ecd6-4e04-932d-f26909c0f547") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 05 12:42:13 crc kubenswrapper[4784]: I1205 12:42:13.359300 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0a262894-4e21-4fe3-b216-b135bfb56d5b-metrics-certs\") pod \"openstack-operator-controller-manager-6f87cfd46c-qltwb\" (UID: \"0a262894-4e21-4fe3-b216-b135bfb56d5b\") " pod="openstack-operators/openstack-operator-controller-manager-6f87cfd46c-qltwb"
Dec 05 12:42:13 crc kubenswrapper[4784]: I1205 12:42:13.359344 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/0a262894-4e21-4fe3-b216-b135bfb56d5b-webhook-certs\") pod \"openstack-operator-controller-manager-6f87cfd46c-qltwb\" (UID: \"0a262894-4e21-4fe3-b216-b135bfb56d5b\") " pod="openstack-operators/openstack-operator-controller-manager-6f87cfd46c-qltwb"
Dec 05 12:42:13 crc kubenswrapper[4784]: E1205 12:42:13.359453 4784 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 05 12:42:13 crc kubenswrapper[4784]: E1205 12:42:13.359500 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0a262894-4e21-4fe3-b216-b135bfb56d5b-webhook-certs podName:0a262894-4e21-4fe3-b216-b135bfb56d5b nodeName:}" failed. No retries permitted until 2025-12-05 12:42:17.359486984 +0000 UTC m=+1016.779553799 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/0a262894-4e21-4fe3-b216-b135bfb56d5b-webhook-certs") pod "openstack-operator-controller-manager-6f87cfd46c-qltwb" (UID: "0a262894-4e21-4fe3-b216-b135bfb56d5b") : secret "webhook-server-cert" not found
Dec 05 12:42:13 crc kubenswrapper[4784]: E1205 12:42:13.359453 4784 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 05 12:42:13 crc kubenswrapper[4784]: E1205 12:42:13.359529 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0a262894-4e21-4fe3-b216-b135bfb56d5b-metrics-certs podName:0a262894-4e21-4fe3-b216-b135bfb56d5b nodeName:}" failed. No retries permitted until 2025-12-05 12:42:17.359523995 +0000 UTC m=+1016.779590800 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0a262894-4e21-4fe3-b216-b135bfb56d5b-metrics-certs") pod "openstack-operator-controller-manager-6f87cfd46c-qltwb" (UID: "0a262894-4e21-4fe3-b216-b135bfb56d5b") : secret "metrics-server-cert" not found
Dec 05 12:42:16 crc kubenswrapper[4784]: I1205 12:42:16.719255 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6c20830c-fef0-4691-9505-5d0c3726ca11-cert\") pod \"infra-operator-controller-manager-57548d458d-6n77l\" (UID: \"6c20830c-fef0-4691-9505-5d0c3726ca11\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-6n77l"
Dec 05 12:42:16 crc kubenswrapper[4784]: E1205 12:42:16.719579 4784 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Dec 05 12:42:16 crc kubenswrapper[4784]: E1205 12:42:16.719828 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6c20830c-fef0-4691-9505-5d0c3726ca11-cert podName:6c20830c-fef0-4691-9505-5d0c3726ca11 nodeName:}" failed. No retries permitted until 2025-12-05 12:42:24.719788862 +0000 UTC m=+1024.139855697 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/6c20830c-fef0-4691-9505-5d0c3726ca11-cert") pod "infra-operator-controller-manager-57548d458d-6n77l" (UID: "6c20830c-fef0-4691-9505-5d0c3726ca11") : secret "infra-operator-webhook-server-cert" not found
Dec 05 12:42:17 crc kubenswrapper[4784]: I1205 12:42:17.023415 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d2899908-ecd6-4e04-932d-f26909c0f547-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4646jk\" (UID: \"d2899908-ecd6-4e04-932d-f26909c0f547\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4646jk"
Dec 05 12:42:17 crc kubenswrapper[4784]: E1205 12:42:17.023585 4784 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 05 12:42:17 crc kubenswrapper[4784]: E1205 12:42:17.023678 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d2899908-ecd6-4e04-932d-f26909c0f547-cert podName:d2899908-ecd6-4e04-932d-f26909c0f547 nodeName:}" failed. No retries permitted until 2025-12-05 12:42:25.023655352 +0000 UTC m=+1024.443722227 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/d2899908-ecd6-4e04-932d-f26909c0f547-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4646jk" (UID: "d2899908-ecd6-4e04-932d-f26909c0f547") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 05 12:42:17 crc kubenswrapper[4784]: I1205 12:42:17.430393 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0a262894-4e21-4fe3-b216-b135bfb56d5b-metrics-certs\") pod \"openstack-operator-controller-manager-6f87cfd46c-qltwb\" (UID: \"0a262894-4e21-4fe3-b216-b135bfb56d5b\") " pod="openstack-operators/openstack-operator-controller-manager-6f87cfd46c-qltwb"
Dec 05 12:42:17 crc kubenswrapper[4784]: I1205 12:42:17.430440 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/0a262894-4e21-4fe3-b216-b135bfb56d5b-webhook-certs\") pod \"openstack-operator-controller-manager-6f87cfd46c-qltwb\" (UID: \"0a262894-4e21-4fe3-b216-b135bfb56d5b\") " pod="openstack-operators/openstack-operator-controller-manager-6f87cfd46c-qltwb"
Dec 05 12:42:17 crc kubenswrapper[4784]: E1205 12:42:17.430563 4784 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 05 12:42:17 crc kubenswrapper[4784]: E1205 12:42:17.430593 4784 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 05 12:42:17 crc kubenswrapper[4784]: E1205 12:42:17.430651 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0a262894-4e21-4fe3-b216-b135bfb56d5b-metrics-certs podName:0a262894-4e21-4fe3-b216-b135bfb56d5b nodeName:}" failed. No retries permitted until 2025-12-05 12:42:25.430629847 +0000 UTC m=+1024.850696722 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0a262894-4e21-4fe3-b216-b135bfb56d5b-metrics-certs") pod "openstack-operator-controller-manager-6f87cfd46c-qltwb" (UID: "0a262894-4e21-4fe3-b216-b135bfb56d5b") : secret "metrics-server-cert" not found
Dec 05 12:42:17 crc kubenswrapper[4784]: E1205 12:42:17.430672 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0a262894-4e21-4fe3-b216-b135bfb56d5b-webhook-certs podName:0a262894-4e21-4fe3-b216-b135bfb56d5b nodeName:}" failed. No retries permitted until 2025-12-05 12:42:25.430662398 +0000 UTC m=+1024.850729323 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/0a262894-4e21-4fe3-b216-b135bfb56d5b-webhook-certs") pod "openstack-operator-controller-manager-6f87cfd46c-qltwb" (UID: "0a262894-4e21-4fe3-b216-b135bfb56d5b") : secret "webhook-server-cert" not found
Dec 05 12:42:21 crc kubenswrapper[4784]: E1205 12:42:21.885012 4784 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/barbican-operator@sha256:f6059a0fbf031d34dcf086d14ce8c0546caeaee23c5780e90b5037c5feee9fea"
Dec 05 12:42:21 crc kubenswrapper[4784]: E1205 12:42:21.885704 4784 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/barbican-operator@sha256:f6059a0fbf031d34dcf086d14ce8c0546caeaee23c5780e90b5037c5feee9fea,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-j2ztl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-operator-controller-manager-7d9dfd778-8ct4c_openstack-operators(5434c275-5acc-4ffe-94ff-1cd9440300b0): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 05 12:42:24 crc kubenswrapper[4784]: I1205 12:42:24.755474 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6c20830c-fef0-4691-9505-5d0c3726ca11-cert\") pod \"infra-operator-controller-manager-57548d458d-6n77l\" (UID: \"6c20830c-fef0-4691-9505-5d0c3726ca11\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-6n77l"
Dec 05 12:42:24 crc kubenswrapper[4784]: E1205 12:42:24.755660 4784 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Dec 05 12:42:24 crc kubenswrapper[4784]: E1205 12:42:24.755917 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6c20830c-fef0-4691-9505-5d0c3726ca11-cert podName:6c20830c-fef0-4691-9505-5d0c3726ca11 nodeName:}" failed. No retries permitted until 2025-12-05 12:42:40.755890936 +0000 UTC m=+1040.175957761 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/6c20830c-fef0-4691-9505-5d0c3726ca11-cert") pod "infra-operator-controller-manager-57548d458d-6n77l" (UID: "6c20830c-fef0-4691-9505-5d0c3726ca11") : secret "infra-operator-webhook-server-cert" not found
Dec 05 12:42:25 crc kubenswrapper[4784]: I1205 12:42:25.058847 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d2899908-ecd6-4e04-932d-f26909c0f547-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4646jk\" (UID: \"d2899908-ecd6-4e04-932d-f26909c0f547\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4646jk"
Dec 05 12:42:25 crc kubenswrapper[4784]: E1205 12:42:25.058992 4784 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 05 12:42:25 crc kubenswrapper[4784]: E1205 12:42:25.059313 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d2899908-ecd6-4e04-932d-f26909c0f547-cert podName:d2899908-ecd6-4e04-932d-f26909c0f547 nodeName:}" failed. No retries permitted until 2025-12-05 12:42:41.059295563 +0000 UTC m=+1040.479362378 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/d2899908-ecd6-4e04-932d-f26909c0f547-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4646jk" (UID: "d2899908-ecd6-4e04-932d-f26909c0f547") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 05 12:42:25 crc kubenswrapper[4784]: I1205 12:42:25.465366 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0a262894-4e21-4fe3-b216-b135bfb56d5b-metrics-certs\") pod \"openstack-operator-controller-manager-6f87cfd46c-qltwb\" (UID: \"0a262894-4e21-4fe3-b216-b135bfb56d5b\") " pod="openstack-operators/openstack-operator-controller-manager-6f87cfd46c-qltwb"
Dec 05 12:42:25 crc kubenswrapper[4784]: I1205 12:42:25.465422 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/0a262894-4e21-4fe3-b216-b135bfb56d5b-webhook-certs\") pod \"openstack-operator-controller-manager-6f87cfd46c-qltwb\" (UID: \"0a262894-4e21-4fe3-b216-b135bfb56d5b\") " pod="openstack-operators/openstack-operator-controller-manager-6f87cfd46c-qltwb"
Dec 05 12:42:25 crc kubenswrapper[4784]: E1205 12:42:25.465592 4784 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 05 12:42:25 crc kubenswrapper[4784]: E1205 12:42:25.465626 4784 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 05 12:42:25 crc kubenswrapper[4784]: E1205 12:42:25.465682 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0a262894-4e21-4fe3-b216-b135bfb56d5b-metrics-certs podName:0a262894-4e21-4fe3-b216-b135bfb56d5b nodeName:}" failed. No retries permitted until 2025-12-05 12:42:41.465657769 +0000 UTC m=+1040.885724604 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0a262894-4e21-4fe3-b216-b135bfb56d5b-metrics-certs") pod "openstack-operator-controller-manager-6f87cfd46c-qltwb" (UID: "0a262894-4e21-4fe3-b216-b135bfb56d5b") : secret "metrics-server-cert" not found
Dec 05 12:42:25 crc kubenswrapper[4784]: E1205 12:42:25.465704 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0a262894-4e21-4fe3-b216-b135bfb56d5b-webhook-certs podName:0a262894-4e21-4fe3-b216-b135bfb56d5b nodeName:}" failed. No retries permitted until 2025-12-05 12:42:41.46569461 +0000 UTC m=+1040.885761495 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/0a262894-4e21-4fe3-b216-b135bfb56d5b-webhook-certs") pod "openstack-operator-controller-manager-6f87cfd46c-qltwb" (UID: "0a262894-4e21-4fe3-b216-b135bfb56d5b") : secret "webhook-server-cert" not found
Dec 05 12:42:26 crc kubenswrapper[4784]: E1205 12:42:26.746223 4784 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670"
Dec 05 12:42:26 crc kubenswrapper[4784]: E1205 12:42:26.746467 4784 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-9k6pb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-prd2m_openstack-operators(af5e1f7a-185c-402f-80b7-fb6c66084d0f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 05 12:42:27 crc kubenswrapper[4784]: E1205 12:42:27.404029 4784 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7"
Dec 05 12:42:27 crc kubenswrapper[4784]: E1205 12:42:27.404600 4784 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-7bhf9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-7765d96ddf-bstnz_openstack-operators(253f2712-fbf0-476b-8ba3-387f7811e4f7): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 05 12:42:27 crc kubenswrapper[4784]: E1205 12:42:27.862826 4784 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2"
Dec 05 12:42:27 crc kubenswrapper[4784]: E1205 12:42:27.863053 4784 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-dpvxv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-mnptg_openstack-operators(ce7167d9-e7f3-428e-bbcb-6879014ec908): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 05 12:42:27 crc kubenswrapper[4784]: E1205 12:42:27.864275 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-mnptg" podUID="ce7167d9-e7f3-428e-bbcb-6879014ec908"
Dec 05 12:42:28 crc kubenswrapper[4784]: E1205 12:42:28.636857 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-mnptg" podUID="ce7167d9-e7f3-428e-bbcb-6879014ec908"
Dec 05 12:42:39 crc kubenswrapper[4784]: I1205 12:42:39.742311 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-fs6fj" event={"ID":"c31c0c1f-afa0-4ba8-a638-d27370864b63","Type":"ContainerStarted","Data":"0b2ef5288e8d574355bc3283b975b06762e422b90a8ff4c05fa838ddbe445154"}
Dec 05 12:42:39 crc kubenswrapper[4784]: I1205 12:42:39.746893 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-j9wr9" event={"ID":"2b3caeee-8e0e-4a20-9cea-f9f668e2a76f","Type":"ContainerStarted","Data":"335669960e4414ceb4ad702ae0ea59bce66defa4be4691f4eb9ca80a717da253"}
Dec 05 12:42:39 crc kubenswrapper[4784]: I1205 12:42:39.749513 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-n6sh9" event={"ID":"d32fc3d7-6f1d-4f5c-8f70-39a417849b13","Type":"ContainerStarted","Data":"a1d926558811e9c10781a15b67a94adf1634b30a685742f638cc6401adca9de3"}
Dec 05 12:42:39 crc kubenswrapper[4784]: I1205 12:42:39.753330 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-lw4zg" event={"ID":"34392862-6b0a-4e19-8702-d685378817b1","Type":"ContainerStarted","Data":"7c05f39aeb584f63083f26a14702a240196763f0ebfcde8f0614c34587a095b7"}
Dec 05 12:42:39 crc kubenswrapper[4784]: I1205 12:42:39.755248 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-bk9hd" event={"ID":"d14c7f23-4235-4257-a178-6b90aa4cf3b4","Type":"ContainerStarted","Data":"b91878991000d82078cb41758fe65aa702b455929ccebd87a78b745868eb5f47"}
Dec 05 12:42:39 crc kubenswrapper[4784]: I1205 12:42:39.759104 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-5z5pm" event={"ID":"6200dbb3-7166-4fa0-925c-fe6155de2927","Type":"ContainerStarted","Data":"428d43c3edde87343ec85850abdeae43a799b010ee5b843c50729adca007177d"}
Dec 05 12:42:39 crc kubenswrapper[4784]: I1205 12:42:39.760426 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-llgbj" event={"ID":"52133bea-24d3-440b-880d-67a3131c52db","Type":"ContainerStarted","Data":"a4c537d7b9ca53e05ceaa4836075f0b74ca907e855fc21f47ce74b241ea6bb2c"}
Dec 05 12:42:39 crc kubenswrapper[4784]: I1205 12:42:39.761498 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-g4cvf" event={"ID":"cae1438b-c8fd-4660-8843-f41bca4b1e15","Type":"ContainerStarted","Data":"c496a58fe761e1f26d41ea06cdaac2607966af2d3df81d2e6c6e7edaf4cada5c"}
Dec 05 12:42:39 crc kubenswrapper[4784]: I1205 12:42:39.762683 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-fkjcs" event={"ID":"70f6568a-d588-4d71-8e38-def379ac95cf","Type":"ContainerStarted","Data":"ad8ec67d7595a9cf6f67a32cc7070e4e651132344fa483cf424ec9a1664404af"}
Dec 05 12:42:39 crc kubenswrapper[4784]: I1205 12:42:39.771301 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-fhllr" event={"ID":"bd95d9d9-a3b2-4f91-94f1-a60041b5b640","Type":"ContainerStarted","Data":"69fa21587595d4335fb835d9e9e77036fa0b59f352ade8387150977334ce08f3"}
Dec 05 12:42:39 crc kubenswrapper[4784]: I1205 12:42:39.785543 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-grj7k" event={"ID":"17a1e99d-2e27-47df-93be-afbb5224152b","Type":"ContainerStarted","Data":"08cdca7ec01e81b2305f147948f1873433ff92fccabf37d5f9e3ef331a087ca2"}
Dec 05 12:42:39 crc kubenswrapper[4784]: I1205 12:42:39.807246 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-tkgxb" event={"ID":"bfa95d2c-7e0c-4a2e-8942-03eb8dfddbd5","Type":"ContainerStarted","Data":"84c7154ab52ed2895e6fc5c2889ce12dd68276715890b1f60695d94f00727cb1"}
Dec 05 12:42:40 crc kubenswrapper[4784]: I1205 12:42:40.806442 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6c20830c-fef0-4691-9505-5d0c3726ca11-cert\") pod \"infra-operator-controller-manager-57548d458d-6n77l\" (UID: \"6c20830c-fef0-4691-9505-5d0c3726ca11\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-6n77l"
Dec 05 12:42:40 crc kubenswrapper[4784]: I1205 12:42:40.812790 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6c20830c-fef0-4691-9505-5d0c3726ca11-cert\") pod \"infra-operator-controller-manager-57548d458d-6n77l\" (UID: \"6c20830c-fef0-4691-9505-5d0c3726ca11\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-6n77l"
Dec 05 12:42:40 crc kubenswrapper[4784]: E1205 12:42:40.908299 4784 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0"
Dec 05 12:42:40 crc kubenswrapper[4784]: E1205 12:42:40.908456 4784 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-j2ztl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-operator-controller-manager-7d9dfd778-8ct4c_openstack-operators(5434c275-5acc-4ffe-94ff-1cd9440300b0): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 05 12:42:40 crc kubenswrapper[4784]: E1205 12:42:40.909641 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"]" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-8ct4c" podUID="5434c275-5acc-4ffe-94ff-1cd9440300b0"
Dec 05 12:42:40 crc kubenswrapper[4784]: I1205 12:42:40.940282 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-tfpnp"
Dec 05 12:42:40 crc kubenswrapper[4784]: I1205 12:42:40.949031 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-6n77l"
Dec 05 12:42:41 crc kubenswrapper[4784]: I1205 12:42:41.113897 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d2899908-ecd6-4e04-932d-f26909c0f547-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4646jk\" (UID: \"d2899908-ecd6-4e04-932d-f26909c0f547\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4646jk"
Dec 05 12:42:41 crc kubenswrapper[4784]: I1205 12:42:41.119087 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d2899908-ecd6-4e04-932d-f26909c0f547-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4646jk\" (UID: \"d2899908-ecd6-4e04-932d-f26909c0f547\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4646jk"
Dec 05 12:42:41 crc kubenswrapper[4784]: I1205 12:42:41.256640 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-ntzkz"
Dec 05 12:42:41 crc kubenswrapper[4784]: I1205 12:42:41.262873 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4646jk"
Dec 05 12:42:41 crc kubenswrapper[4784]: I1205 12:42:41.520888 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0a262894-4e21-4fe3-b216-b135bfb56d5b-metrics-certs\") pod \"openstack-operator-controller-manager-6f87cfd46c-qltwb\" (UID: \"0a262894-4e21-4fe3-b216-b135bfb56d5b\") " pod="openstack-operators/openstack-operator-controller-manager-6f87cfd46c-qltwb"
Dec 05 12:42:41 crc kubenswrapper[4784]: I1205 12:42:41.520946 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/0a262894-4e21-4fe3-b216-b135bfb56d5b-webhook-certs\") pod \"openstack-operator-controller-manager-6f87cfd46c-qltwb\" (UID: \"0a262894-4e21-4fe3-b216-b135bfb56d5b\") " pod="openstack-operators/openstack-operator-controller-manager-6f87cfd46c-qltwb"
Dec 05 12:42:41 crc kubenswrapper[4784]: I1205 12:42:41.544882 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0a262894-4e21-4fe3-b216-b135bfb56d5b-metrics-certs\") pod \"openstack-operator-controller-manager-6f87cfd46c-qltwb\" (UID: \"0a262894-4e21-4fe3-b216-b135bfb56d5b\") " pod="openstack-operators/openstack-operator-controller-manager-6f87cfd46c-qltwb"
Dec 05 12:42:41 crc kubenswrapper[4784]: I1205 12:42:41.545204 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/0a262894-4e21-4fe3-b216-b135bfb56d5b-webhook-certs\") pod \"openstack-operator-controller-manager-6f87cfd46c-qltwb\" (UID: \"0a262894-4e21-4fe3-b216-b135bfb56d5b\") " pod="openstack-operators/openstack-operator-controller-manager-6f87cfd46c-qltwb"
Dec 05 12:42:41 crc kubenswrapper[4784]: I1205 12:42:41.601587 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-6n77l"]
Dec 05 12:42:41 crc kubenswrapper[4784]: E1205 12:42:41.622810 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-prd2m" podUID="af5e1f7a-185c-402f-80b7-fb6c66084d0f"
Dec 05 12:42:41 crc kubenswrapper[4784]: I1205 12:42:41.684388 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4646jk"]
Dec 05 12:42:41 crc kubenswrapper[4784]: I1205 12:42:41.778166 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-xrc4m"
Dec 05 12:42:41 crc kubenswrapper[4784]: E1205 12:42:41.788603 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-bstnz" podUID="253f2712-fbf0-476b-8ba3-387f7811e4f7"
Dec 05 12:42:41 crc kubenswrapper[4784]: I1205 12:42:41.788883 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-6f87cfd46c-qltwb"
Dec 05 12:42:41 crc kubenswrapper[4784]: I1205 12:42:41.853834 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4646jk" event={"ID":"d2899908-ecd6-4e04-932d-f26909c0f547","Type":"ContainerStarted","Data":"289c90e892d5a072b734d0fbfdcc4f96fd3e718a0fa7f016fcaf83788aa0c588"}
Dec 05 12:42:41 crc kubenswrapper[4784]: I1205 12:42:41.862093 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-g4cvf" event={"ID":"cae1438b-c8fd-4660-8843-f41bca4b1e15","Type":"ContainerStarted","Data":"94fbdcb9178fcfd5719210028b7896dee238e181e6feb2a7d089b49117ebd3f6"}
Dec 05 12:42:41 crc kubenswrapper[4784]: I1205 12:42:41.862249 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-g4cvf"
Dec 05 12:42:41 crc kubenswrapper[4784]: I1205 12:42:41.866985 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-7b48476889-fdjfg" event={"ID":"19094ce3-8926-4668-87b9-db8aac572e80","Type":"ContainerStarted","Data":"612b1d70e001c813830a66e773b00792c38a812122df74c6cf5ea1584dcbeb98"}
Dec 05 12:42:41 crc kubenswrapper[4784]: I1205 12:42:41.869703 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-n6sh9" event={"ID":"d32fc3d7-6f1d-4f5c-8f70-39a417849b13","Type":"ContainerStarted","Data":"debdc0059b26a40318747a83309fde45a5bb8c8613ac60d47ee72e3f9fdeb0e7"}
Dec 05 12:42:41 crc kubenswrapper[4784]: I1205 12:42:41.869918 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-n6sh9"
Dec 05 12:42:41 crc kubenswrapper[4784]: I1205 12:42:41.871988 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-bstnz" event={"ID":"253f2712-fbf0-476b-8ba3-387f7811e4f7","Type":"ContainerStarted","Data":"3bd8a232365b5425fd5b38a012f922e83dba6773e0940935042a516bdd5ca20e"}
Dec 05 12:42:41 crc kubenswrapper[4784]: I1205 12:42:41.874850 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-2jxh6" event={"ID":"2e6a7f03-9a79-4c8f-8dfd-c1f4e6e3c2ad","Type":"ContainerStarted","Data":"f63bd5093dc321d86b78a949b5da5798c087ae09b2cf68803838fda055a119b4"}
Dec 05 12:42:41 crc kubenswrapper[4784]: I1205 12:42:41.874905 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-2jxh6" event={"ID":"2e6a7f03-9a79-4c8f-8dfd-c1f4e6e3c2ad","Type":"ContainerStarted","Data":"a3139e8554126fed31733f5677dd24f2198c68eafbc0f53a109d18c8451f8806"}
Dec 05 12:42:41 crc kubenswrapper[4784]: I1205 12:42:41.875083 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-2jxh6"
Dec 05 12:42:41 crc kubenswrapper[4784]: I1205 12:42:41.879398 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-prd2m" event={"ID":"af5e1f7a-185c-402f-80b7-fb6c66084d0f","Type":"ContainerStarted","Data":"fda09ee407b389f4e11fc008e68dbb3b3986750cd76e4ed6990717d7325814cc"}
Dec 05 12:42:41 crc kubenswrapper[4784]: I1205 12:42:41.883403 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-tc6cr" event={"ID":"43833100-b1fd-45fd-b772-9d0ee036c4ce","Type":"ContainerStarted","Data":"d19435d97c05be98b84b0c6f8cca34293b2c69ad5efec66d2fd47af155a0019f"}
Dec 05 12:42:41 crc kubenswrapper[4784]: I1205 12:42:41.883455 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-tc6cr" event={"ID":"43833100-b1fd-45fd-b772-9d0ee036c4ce","Type":"ContainerStarted","Data":"e6e8ffb5a0d89b9556aa59db1faf67e4c5c6ef0b489e3364c2a61851fff0cfa1"}
Dec 05 12:42:41 crc kubenswrapper[4784]: I1205 12:42:41.884164 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-tc6cr"
Dec 05 12:42:41 crc kubenswrapper[4784]: I1205 12:42:41.896593 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-g4cvf" podStartSLOduration=2.460614503 podStartE2EDuration="33.896568941s" podCreationTimestamp="2025-12-05 12:42:08 +0000 UTC" firstStartedPulling="2025-12-05 12:42:09.862159585 +0000 UTC m=+1009.282226400" lastFinishedPulling="2025-12-05 12:42:41.298114013 +0000 UTC m=+1040.718180838" observedRunningTime="2025-12-05 12:42:41.887396405 +0000 UTC m=+1041.307463230" watchObservedRunningTime="2025-12-05 12:42:41.896568941 +0000 UTC m=+1041.316635756"
Dec 05 12:42:41 crc kubenswrapper[4784]: I1205 12:42:41.905106 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-2dt8t" event={"ID":"d725fa2f-4d6e-47b2-aa2d-1757fcef4ad5","Type":"ContainerStarted","Data":"8ce18307fd050d2e4767216cca46050e62c682f8471f5fe9ba4651ada415996a"}
Dec 05 12:42:41 crc kubenswrapper[4784]: I1205 12:42:41.905165 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-2dt8t" event={"ID":"d725fa2f-4d6e-47b2-aa2d-1757fcef4ad5","Type":"ContainerStarted","Data":"5e333126a523fadafbe31d159056d86273f848aa81cec11657c240a2fc56f0e8"}
Dec 05 12:42:41 crc kubenswrapper[4784]: I1205 12:42:41.906147 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-2dt8t"
Dec 05 12:42:41 crc kubenswrapper[4784]: I1205 12:42:41.916291 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-6n77l" event={"ID":"6c20830c-fef0-4691-9505-5d0c3726ca11","Type":"ContainerStarted","Data":"224d5875bfe74ecc66246e80f0015ffadccd25b997f5a7dbe1b3d8e321e0d586"}
Dec 05 12:42:41 crc kubenswrapper[4784]: I1205 12:42:41.924913 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-tc6cr" podStartSLOduration=4.562128629 podStartE2EDuration="32.924900975s" podCreationTimestamp="2025-12-05 12:42:09 +0000 UTC" firstStartedPulling="2025-12-05 12:42:10.574628493 +0000 UTC m=+1009.994695308" lastFinishedPulling="2025-12-05 12:42:38.937400839 +0000 UTC m=+1038.357467654" observedRunningTime="2025-12-05 12:42:41.922673146 +0000 UTC m=+1041.342739961" watchObservedRunningTime="2025-12-05 12:42:41.924900975 +0000 UTC m=+1041.344967790"
Dec 05 12:42:41 crc kubenswrapper[4784]: I1205 12:42:41.991344 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-2jxh6" podStartSLOduration=5.612390951 podStartE2EDuration="33.991326326s" podCreationTimestamp="2025-12-05 12:42:08 +0000 UTC" firstStartedPulling="2025-12-05 12:42:10.558618159 +0000 UTC m=+1009.978684974" lastFinishedPulling="2025-12-05 12:42:38.937553514 +0000 UTC m=+1038.357620349" observedRunningTime="2025-12-05 12:42:41.968449122 +0000 UTC m=+1041.388515937" watchObservedRunningTime="2025-12-05 12:42:41.991326326 +0000 UTC m=+1041.411393141"
Dec 05 12:42:42 crc kubenswrapper[4784]: I1205 12:42:42.028906 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-n6sh9" podStartSLOduration=3.094553383 podStartE2EDuration="34.028885177s" podCreationTimestamp="2025-12-05 12:42:08 +0000 UTC" firstStartedPulling="2025-12-05 12:42:10.328018343 +0000 UTC m=+1009.748085158" lastFinishedPulling="2025-12-05 12:42:41.262350137 +0000 UTC m=+1040.682416952" observedRunningTime="2025-12-05 12:42:42.020617119 +0000 UTC m=+1041.440683934" watchObservedRunningTime="2025-12-05 12:42:42.028885177 +0000 UTC m=+1041.448951992"
Dec 05 12:42:42 crc kubenswrapper[4784]: I1205 12:42:42.098008 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-2dt8t" podStartSLOduration=5.819145947 podStartE2EDuration="34.097985881s" podCreationTimestamp="2025-12-05 12:42:08 +0000 UTC" firstStartedPulling="2025-12-05 12:42:10.559437334 +0000 UTC m=+1009.979504149" lastFinishedPulling="2025-12-05 12:42:38.838277268 +0000 UTC m=+1038.258344083" observedRunningTime="2025-12-05 12:42:42.094575585 +0000 UTC m=+1041.514642410" watchObservedRunningTime="2025-12-05 12:42:42.097985881 +0000 UTC m=+1041.518052696"
Dec 05 12:42:42 crc kubenswrapper[4784]: I1205 12:42:42.787584 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-6f87cfd46c-qltwb"]
Dec 05 12:42:42 crc kubenswrapper[4784]: W1205 12:42:42.821194 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0a262894_4e21_4fe3_b216_b135bfb56d5b.slice/crio-91ce3f2d6652e4393e75b70de0ed81ff726d98b59d6ca25ce26d98b1541a5fd9 WatchSource:0}: Error finding container 91ce3f2d6652e4393e75b70de0ed81ff726d98b59d6ca25ce26d98b1541a5fd9: Status 404 returned error can't find the container with id 91ce3f2d6652e4393e75b70de0ed81ff726d98b59d6ca25ce26d98b1541a5fd9
Dec 05 12:42:42 crc kubenswrapper[4784]: I1205 12:42:42.934332 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-grj7k" event={"ID":"17a1e99d-2e27-47df-93be-afbb5224152b","Type":"ContainerStarted","Data":"c1eb00eb9fce9f866326f751a292d10aa75522dac1f1f09605dcbff8b9bbd603"}
Dec 05 12:42:42 crc kubenswrapper[4784]: I1205 12:42:42.934400 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-grj7k"
Dec 05 12:42:42 crc kubenswrapper[4784]: I1205 12:42:42.961389 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-5z5pm" event={"ID":"6200dbb3-7166-4fa0-925c-fe6155de2927","Type":"ContainerStarted","Data":"0e602a298ab7cf6115234ccbc9646ed248d5a1597d10a5632e6999fd370f60d9"}
Dec 05 12:42:42 crc kubenswrapper[4784]: I1205 12:42:42.961968 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-5z5pm"
Dec 05 12:42:42 crc kubenswrapper[4784]: I1205 12:42:42.967923 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-7b48476889-fdjfg" event={"ID":"19094ce3-8926-4668-87b9-db8aac572e80","Type":"ContainerStarted","Data":"75dea2886b8a7fa5c7581b990e5de639e53a07dbbf0a8344d518c90f839d6ebe"}
Dec 05 12:42:42 crc kubenswrapper[4784]: I1205 12:42:42.968116 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-7b48476889-fdjfg"
Dec 05 12:42:42 crc kubenswrapper[4784]: I1205 12:42:42.972458 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-grj7k" podStartSLOduration=3.322467397 podStartE2EDuration="34.972444486s" podCreationTimestamp="2025-12-05 12:42:08 +0000 UTC" firstStartedPulling="2025-12-05 12:42:09.691704212 +0000 UTC m=+1009.111771027" lastFinishedPulling="2025-12-05 12:42:41.341681301 +0000 UTC m=+1040.761748116" observedRunningTime="2025-12-05 12:42:42.969478323 +0000 UTC m=+1042.389545138" watchObservedRunningTime="2025-12-05 12:42:42.972444486 +0000 UTC m=+1042.392511301"
Dec 05 12:42:42 crc kubenswrapper[4784]: I1205 12:42:42.973062 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-6f87cfd46c-qltwb" event={"ID":"0a262894-4e21-4fe3-b216-b135bfb56d5b","Type":"ContainerStarted","Data":"91ce3f2d6652e4393e75b70de0ed81ff726d98b59d6ca25ce26d98b1541a5fd9"}
Dec 05 12:42:43 crc kubenswrapper[4784]: I1205 12:42:43.005652 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-5z5pm" podStartSLOduration=4.17893263 podStartE2EDuration="35.00563444s" podCreationTimestamp="2025-12-05 12:42:08 +0000 UTC" firstStartedPulling="2025-12-05 12:42:10.324563744 +0000 UTC m=+1009.744630559" lastFinishedPulling="2025-12-05 12:42:41.151265554 +0000 UTC m=+1040.571332369" observedRunningTime="2025-12-05 12:42:42.992663215 +0000 UTC m=+1042.412730020" watchObservedRunningTime="2025-12-05 12:42:43.00563444 +0000 UTC m=+1042.425701255"
Dec 05 12:42:43 crc kubenswrapper[4784]: I1205 12:42:43.022012 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-7b48476889-fdjfg" podStartSLOduration=5.855711331 podStartE2EDuration="34.0219973s" podCreationTimestamp="2025-12-05 12:42:09 +0000 UTC" firstStartedPulling="2025-12-05 12:42:10.649144167 +0000 UTC m=+1010.069210982" lastFinishedPulling="2025-12-05 12:42:38.815430116 +0000 UTC m=+1038.235496951" observedRunningTime="2025-12-05 12:42:43.019559284 +0000 UTC m=+1042.439626099" watchObservedRunningTime="2025-12-05 12:42:43.0219973 +0000 UTC m=+1042.442064115"
Dec 05 12:42:43 crc kubenswrapper[4784]: I1205 12:42:43.029862 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-bk9hd" event={"ID":"d14c7f23-4235-4257-a178-6b90aa4cf3b4","Type":"ContainerStarted","Data":"a8718f89ea7152ed62b0c1fbe1fa0dd7c65ca7df76f524864e7a4c60b27cd1af"}
Dec 05 12:42:43 crc kubenswrapper[4784]: I1205 12:42:43.029895 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-bk9hd"
Dec 05 12:42:43 crc kubenswrapper[4784]: I1205 12:42:43.029913 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-llgbj" event={"ID":"52133bea-24d3-440b-880d-67a3131c52db","Type":"ContainerStarted","Data":"936a51ddd8016c7e9017b1e9e1b5b5459b6d20670b458b468032514e1de0deb9"}
Dec 05 12:42:43 crc kubenswrapper[4784]: I1205 12:42:43.029929 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-998648c74-llgbj"
Dec 05 12:42:43 crc kubenswrapper[4784]: I1205 12:42:43.034785 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-j9wr9" event={"ID":"2b3caeee-8e0e-4a20-9cea-f9f668e2a76f","Type":"ContainerStarted","Data":"f2d0718e9d2e0bce11472db2b879c2a91231fbb19d5a8899dd597dd23e63905c"}
Dec 05 12:42:43 crc kubenswrapper[4784]: I1205 12:42:43.034972 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5854674fcc-j9wr9"
Dec 05 12:42:43 crc kubenswrapper[4784]: I1205 12:42:43.046524 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-bk9hd" podStartSLOduration=4.074813966 podStartE2EDuration="35.046507054s" podCreationTimestamp="2025-12-05 12:42:08 +0000 UTC" firstStartedPulling="2025-12-05 12:42:10.436537787 +0000 UTC m=+1009.856604602" lastFinishedPulling="2025-12-05 12:42:41.408230875 +0000 UTC m=+1040.828297690" observedRunningTime="2025-12-05 12:42:43.044572134 +0000 UTC m=+1042.464638949" watchObservedRunningTime="2025-12-05 12:42:43.046507054 +0000 UTC m=+1042.466573869"
Dec 05 12:42:43 crc kubenswrapper[4784]: I1205 12:42:43.048561 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-lw4zg" event={"ID":"34392862-6b0a-4e19-8702-d685378817b1","Type":"ContainerStarted","Data":"6e50656f4fa7e79942885ae9b59455534e7574c3eacdc08ac35b4f1c23c44349"}
Dec 05 12:42:43 crc kubenswrapper[4784]: I1205 12:42:43.049169 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-lw4zg"
Dec 05 12:42:43 crc kubenswrapper[4784]: I1205 12:42:43.052168 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-tkgxb" event={"ID":"bfa95d2c-7e0c-4a2e-8942-03eb8dfddbd5","Type":"ContainerStarted","Data":"c0303caf4c2f58aceb24c1464f23109e1833f80c97494c71f07bb3c5c629c21c"}
Dec 05 12:42:43 crc kubenswrapper[4784]: I1205 12:42:43.052675 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-tkgxb"
Dec 05 12:42:43 crc kubenswrapper[4784]: I1205 12:42:43.062100 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-prd2m" event={"ID":"af5e1f7a-185c-402f-80b7-fb6c66084d0f","Type":"ContainerStarted","Data":"4f0716f4591201cdac9062c73cdb7cd86fe8ed552bdb89e18660554d28c0164e"}
Dec 05 12:42:43 crc kubenswrapper[4784]: I1205 12:42:43.062883 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-prd2m"
Dec 05 12:42:43 crc kubenswrapper[4784]: I1205 12:42:43.067169 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-bstnz" event={"ID":"253f2712-fbf0-476b-8ba3-387f7811e4f7","Type":"ContainerStarted","Data":"50bacbc387dd9173cfc3c168683c9891711afe896f8e5a8083a611317b0aef2c"}
Dec 05 12:42:43 crc kubenswrapper[4784]: I1205 12:42:43.067786 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-bstnz"
Dec 05 12:42:43 crc kubenswrapper[4784]: I1205 12:42:43.091650 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-8ct4c" event={"ID":"5434c275-5acc-4ffe-94ff-1cd9440300b0","Type":"ContainerStarted","Data":"ac2cac6268609b08fe14343da655f0ff705c69225f71b869deac62b5dc7a0002"}
Dec 05 12:42:43 crc kubenswrapper[4784]: I1205 12:42:43.097995 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-998648c74-llgbj" podStartSLOduration=4.083793095 podStartE2EDuration="35.097965019s" podCreationTimestamp="2025-12-05 12:42:08 +0000 UTC" firstStartedPulling="2025-12-05 12:42:10.430063334 +0000 UTC m=+1009.850130149" lastFinishedPulling="2025-12-05 12:42:41.444235248 +0000 UTC m=+1040.864302073" observedRunningTime="2025-12-05 12:42:43.07459037 +0000 UTC m=+1042.494657185" watchObservedRunningTime="2025-12-05 12:42:43.097965019 +0000 UTC m=+1042.518031834"
Dec 05 12:42:43 crc kubenswrapper[4784]: I1205 12:42:43.133658 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-bstnz" podStartSLOduration=2.9423339630000003 podStartE2EDuration="35.133641661s" podCreationTimestamp="2025-12-05 12:42:08 +0000 UTC" firstStartedPulling="2025-12-05 12:42:10.210746353 +0000 UTC m=+1009.630813168" lastFinishedPulling="2025-12-05 12:42:42.402054051 +0000 UTC m=+1041.822120866" observedRunningTime="2025-12-05 12:42:43.132679871 +0000 UTC m=+1042.552746686" watchObservedRunningTime="2025-12-05 12:42:43.133641661 +0000 UTC m=+1042.553708476"
Dec 05 12:42:43 crc kubenswrapper[4784]: I1205 12:42:43.140452 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-mnptg" event={"ID":"ce7167d9-e7f3-428e-bbcb-6879014ec908","Type":"ContainerStarted","Data":"8f42ae1b079861ac59b10e30455570d358712cdbada9b60f45ecdaf12a0e996a"}
Dec 05 12:42:43 crc kubenswrapper[4784]: I1205 12:42:43.169507 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-tkgxb" podStartSLOduration=4.062832864 podStartE2EDuration="35.169491349s" podCreationTimestamp="2025-12-05 12:42:08 +0000 UTC" firstStartedPulling="2025-12-05 12:42:10.266645682 +0000 UTC m=+1009.686712497" lastFinishedPulling="2025-12-05 12:42:41.373304167 +0000 UTC m=+1040.793370982" observedRunningTime="2025-12-05 12:42:43.168435196 +0000 UTC m=+1042.588502011" watchObservedRunningTime="2025-12-05 12:42:43.169491349 +0000 UTC m=+1042.589558164"
Dec 05 12:42:43 crc kubenswrapper[4784]: I1205 12:42:43.175733 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-fs6fj" event={"ID":"c31c0c1f-afa0-4ba8-a638-d27370864b63","Type":"ContainerStarted","Data":"68a668de9ffba679775bd57ed14a6bf911bd95b8f9cd53ebd077147fb8baf05b"}
Dec 05 12:42:43 crc kubenswrapper[4784]: I1205 12:42:43.176458 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-78f8948974-fs6fj"
Dec 05 12:42:43 crc kubenswrapper[4784]: I1205 12:42:43.180822 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-fkjcs" event={"ID":"70f6568a-d588-4d71-8e38-def379ac95cf","Type":"ContainerStarted","Data":"05326619a14a96bad812ec7d1600fe29c061e4b4dd1574ecfe009687e4afa6a0"}
Dec 05 12:42:43 crc kubenswrapper[4784]: I1205 12:42:43.181278 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-fkjcs"
Dec 05 12:42:43 crc kubenswrapper[4784]: I1205 12:42:43.183582 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-fhllr" event={"ID":"bd95d9d9-a3b2-4f91-94f1-a60041b5b640","Type":"ContainerStarted","Data":"074cd170c5c0be0f0e7d5c380d5fd2f1a9276333c507bd6b8437ed5726ef1bab"}
Dec 05 12:42:43 crc kubenswrapper[4784]: I1205 12:42:43.183610 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-fhllr"
Dec 05 12:42:43 crc kubenswrapper[4784]: I1205 12:42:43.219585 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-prd2m" podStartSLOduration=3.254438066 podStartE2EDuration="35.21956559s" podCreationTimestamp="2025-12-05 12:42:08 +0000 UTC" firstStartedPulling="2025-12-05 12:42:10.429687312 +0000 UTC m=+1009.849754127" lastFinishedPulling="2025-12-05 12:42:42.394814836 +0000 UTC m=+1041.814881651" observedRunningTime="2025-12-05 12:42:43.195790239 +0000 UTC m=+1042.615857064" watchObservedRunningTime="2025-12-05 12:42:43.21956559 +0000 UTC m=+1042.639632405"
Dec 05 12:42:43 crc kubenswrapper[4784]: I1205 12:42:43.235719 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5854674fcc-j9wr9" podStartSLOduration=3.449766561 podStartE2EDuration="34.235696893s" podCreationTimestamp="2025-12-05 12:42:09 +0000 UTC" firstStartedPulling="2025-12-05 12:42:10.555033746 +0000 UTC m=+1009.975100561" lastFinishedPulling="2025-12-05 12:42:41.340964078 +0000 UTC m=+1040.761030893" observedRunningTime="2025-12-05 12:42:43.219962522 +0000 UTC m=+1042.640029337" watchObservedRunningTime="2025-12-05 12:42:43.235696893 +0000 UTC m=+1042.655763708"
Dec 05 12:42:43 crc kubenswrapper[4784]: I1205 12:42:43.251340 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-lw4zg" podStartSLOduration=3.808063283 podStartE2EDuration="35.25132137s" podCreationTimestamp="2025-12-05 12:42:08 +0000 UTC" firstStartedPulling="2025-12-05 12:42:09.854628748 +0000 UTC m=+1009.274695563" lastFinishedPulling="2025-12-05 12:42:41.297886835 +0000 UTC m=+1040.717953650" observedRunningTime="2025-12-05 12:42:43.245810568 +0000 UTC m=+1042.665877393" watchObservedRunningTime="2025-12-05 12:42:43.25132137 +0000 UTC m=+1042.671388185"
Dec 05 12:42:43 crc kubenswrapper[4784]: I1205 12:42:43.267441 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-fkjcs" podStartSLOduration=4.164746144 podStartE2EDuration="35.267425162s" podCreationTimestamp="2025-12-05 12:42:08 +0000 UTC" firstStartedPulling="2025-12-05 12:42:10.238569289 +0000 UTC m=+1009.658636104" lastFinishedPulling="2025-12-05 12:42:41.341248307 +0000 UTC m=+1040.761315122" observedRunningTime="2025-12-05 12:42:43.261990323 +0000 UTC m=+1042.682057138" watchObservedRunningTime="2025-12-05 12:42:43.267425162 +0000 UTC m=+1042.687491977"
Dec 05 12:42:43 crc kubenswrapper[4784]: I1205 12:42:43.279026 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-78f8948974-fs6fj" podStartSLOduration=4.365684627 podStartE2EDuration="35.279008683s" podCreationTimestamp="2025-12-05 12:42:08 +0000 UTC" firstStartedPulling="2025-12-05 12:42:10.445453608 +0000 UTC m=+1009.865520423" lastFinishedPulling="2025-12-05 12:42:41.358777664 +0000 UTC m=+1040.778844479" observedRunningTime="2025-12-05 12:42:43.277355152 +0000 UTC m=+1042.697421967" watchObservedRunningTime="2025-12-05 12:42:43.279008683 +0000 UTC m=+1042.699075498"
Dec 05 12:42:43 crc kubenswrapper[4784]: I1205 12:42:43.300628 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-mnptg" podStartSLOduration=2.326618358 podStartE2EDuration="34.300606876s" podCreationTimestamp="2025-12-05 12:42:09 +0000 UTC" firstStartedPulling="2025-12-05 12:42:10.64828851 +0000 UTC m=+1010.068355325" lastFinishedPulling="2025-12-05 12:42:42.622277028 +0000 UTC m=+1042.042343843" observedRunningTime="2025-12-05 12:42:43.28756538 +0000 UTC m=+1042.707632195" watchObservedRunningTime="2025-12-05 12:42:43.300606876 +0000 UTC m=+1042.720673691"
Dec 05 12:42:43 crc kubenswrapper[4784]: I1205 12:42:43.318272 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-fhllr" podStartSLOduration=4.217883291 podStartE2EDuration="35.318254817s" podCreationTimestamp="2025-12-05 12:42:08 +0000 UTC" firstStartedPulling="2025-12-05 12:42:10.23892013 +0000 UTC m=+1009.658986945" lastFinishedPulling="2025-12-05 12:42:41.339291656 +0000 UTC m=+1040.759358471" observedRunningTime="2025-12-05 12:42:43.313014234 +0000 UTC m=+1042.733081059" watchObservedRunningTime="2025-12-05 12:42:43.318254817 +0000 UTC m=+1042.738321622"
Dec 05 12:42:44 crc kubenswrapper[4784]: I1205 12:42:44.204230 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-6f87cfd46c-qltwb" event={"ID":"0a262894-4e21-4fe3-b216-b135bfb56d5b","Type":"ContainerStarted","Data":"36683776d900c2ab7dcf3221d055e0a33159ad5b29e8a609396afb8ccbac74fd"}
Dec 05 12:42:44 crc kubenswrapper[4784]: I1205 12:42:44.204698 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-6f87cfd46c-qltwb"
Dec 05 12:42:44 crc kubenswrapper[4784]: I1205 12:42:44.213080 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-8ct4c" event={"ID":"5434c275-5acc-4ffe-94ff-1cd9440300b0","Type":"ContainerStarted","Data":"146bb8c3bf50236e20406aa204dcfb911167a75c34b181b2f33579bfd8f8c672"}
Dec 05 12:42:44 crc kubenswrapper[4784]: I1205 12:42:44.213130 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-8ct4c"
Dec 05 12:42:44 crc kubenswrapper[4784]: I1205 12:42:44.222522 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5854674fcc-j9wr9"
Dec 05 12:42:44 crc kubenswrapper[4784]: I1205 12:42:44.224455 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-fhllr"
Dec 05 12:42:44 crc kubenswrapper[4784]: I1205 12:42:44.224931 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-tkgxb"
Dec 05 12:42:44 crc kubenswrapper[4784]: I1205 12:42:44.226162 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-grj7k"
Dec 05 12:42:44 crc kubenswrapper[4784]: I1205 12:42:44.226228 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-lw4zg"
Dec 05 12:42:44 crc kubenswrapper[4784]: I1205 12:42:44.226246 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-998648c74-llgbj"
Dec 05 12:42:44 crc kubenswrapper[4784]: I1205 12:42:44.228355 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-78f8948974-fs6fj"
Dec 05 12:42:44 crc kubenswrapper[4784]: I1205 12:42:44.229791 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-5z5pm"
Dec 05 12:42:44 crc kubenswrapper[4784]: I1205 12:42:44.229956 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-fkjcs"
Dec 05 12:42:44 crc kubenswrapper[4784]: I1205 12:42:44.231851 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-6f87cfd46c-qltwb" podStartSLOduration=35.231834121 podStartE2EDuration="35.231834121s" podCreationTimestamp="2025-12-05 12:42:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:42:44.228556439 +0000 UTC m=+1043.648623254" watchObservedRunningTime="2025-12-05 12:42:44.231834121 +0000 UTC m=+1043.651900936"
Dec 05 12:42:44 crc kubenswrapper[4784]: I1205 12:42:44.235318 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-bk9hd"
Dec 05 12:42:44 crc kubenswrapper[4784]: I1205 12:42:44.278132 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-8ct4c" podStartSLOduration=3.226152001 podStartE2EDuration="36.278111423s" podCreationTimestamp="2025-12-05 12:42:08 +0000 UTC" firstStartedPulling="2025-12-05 12:42:09.577468228 +0000 UTC m=+1008.997535043" lastFinishedPulling="2025-12-05 12:42:42.62942766 +0000 UTC m=+1042.049494465" observedRunningTime="2025-12-05 12:42:44.264783878 +0000 UTC m=+1043.684850693" watchObservedRunningTime="2025-12-05 12:42:44.278111423 +0000 UTC m=+1043.698178238"
Dec 05 12:42:46 crc kubenswrapper[4784]: I1205 12:42:46.226287 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4646jk" event={"ID":"d2899908-ecd6-4e04-932d-f26909c0f547","Type":"ContainerStarted","Data":"d2f10c301c187de7902c781112878d1be1ea793d1fabaed0df2ceeda9a0eb4bc"}
Dec 05 12:42:46 crc kubenswrapper[4784]: I1205 12:42:46.226720 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4646jk" event={"ID":"d2899908-ecd6-4e04-932d-f26909c0f547","Type":"ContainerStarted","Data":"3d1372222fa33f6376038e3a47b72be7ab116adadbee881ca36d9e8c3e2e774a"}
Dec 05 12:42:46 crc kubenswrapper[4784]: I1205 12:42:46.226759 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4646jk"
Dec 05 12:42:46 crc kubenswrapper[4784]: I1205 12:42:46.229728 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-6n77l" event={"ID":"6c20830c-fef0-4691-9505-5d0c3726ca11","Type":"ContainerStarted","Data":"3cdba335ff2c1e5333d8002b4989b0986f064f9b2f8e5b1bab9b50a7500c4185"}
Dec 05 12:42:46 crc kubenswrapper[4784]: I1205 12:42:46.229797 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-6n77l" event={"ID":"6c20830c-fef0-4691-9505-5d0c3726ca11","Type":"ContainerStarted","Data":"fd17a64bb5761c26bd417187cbde783fdeeabe2f175356e1e4fd14512ebc2683"}
Dec 05 12:42:46 crc kubenswrapper[4784]: I1205 12:42:46.267350 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4646jk" podStartSLOduration=34.5681441 podStartE2EDuration="38.267320184s" podCreationTimestamp="2025-12-05 12:42:08 +0000 UTC" firstStartedPulling="2025-12-05 12:42:41.742938051 +0000 UTC m=+1041.163004866" lastFinishedPulling="2025-12-05 12:42:45.442114135
+0000 UTC m=+1044.862180950" observedRunningTime="2025-12-05 12:42:46.258100656 +0000 UTC m=+1045.678167511" watchObservedRunningTime="2025-12-05 12:42:46.267320184 +0000 UTC m=+1045.687387039" Dec 05 12:42:46 crc kubenswrapper[4784]: I1205 12:42:46.282272 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-57548d458d-6n77l" podStartSLOduration=34.477230365 podStartE2EDuration="38.282257039s" podCreationTimestamp="2025-12-05 12:42:08 +0000 UTC" firstStartedPulling="2025-12-05 12:42:41.62197464 +0000 UTC m=+1041.042041455" lastFinishedPulling="2025-12-05 12:42:45.427001314 +0000 UTC m=+1044.847068129" observedRunningTime="2025-12-05 12:42:46.277931175 +0000 UTC m=+1045.697997990" watchObservedRunningTime="2025-12-05 12:42:46.282257039 +0000 UTC m=+1045.702323854" Dec 05 12:42:47 crc kubenswrapper[4784]: I1205 12:42:47.236900 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-57548d458d-6n77l" Dec 05 12:42:48 crc kubenswrapper[4784]: I1205 12:42:48.963539 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-8ct4c" Dec 05 12:42:49 crc kubenswrapper[4784]: I1205 12:42:49.041629 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-g4cvf" Dec 05 12:42:49 crc kubenswrapper[4784]: I1205 12:42:49.234081 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-bstnz" Dec 05 12:42:49 crc kubenswrapper[4784]: I1205 12:42:49.392498 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-n6sh9" Dec 05 12:42:49 crc kubenswrapper[4784]: I1205 12:42:49.410960 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-prd2m" Dec 05 12:42:49 crc kubenswrapper[4784]: I1205 12:42:49.496002 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-2jxh6" Dec 05 12:42:49 crc kubenswrapper[4784]: I1205 12:42:49.572314 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-2dt8t" Dec 05 12:42:49 crc kubenswrapper[4784]: I1205 12:42:49.604853 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-tc6cr" Dec 05 12:42:49 crc kubenswrapper[4784]: I1205 12:42:49.887631 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-7b48476889-fdjfg" Dec 05 12:42:50 crc kubenswrapper[4784]: I1205 12:42:50.958352 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-57548d458d-6n77l" Dec 05 12:42:51 crc kubenswrapper[4784]: I1205 12:42:51.271403 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4646jk" Dec 05 12:42:51 crc kubenswrapper[4784]: I1205 12:42:51.796893 4784 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-6f87cfd46c-qltwb" Dec 05 12:43:14 crc kubenswrapper[4784]: I1205 12:43:14.297339 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6697f74bb9-lvd58"] Dec 05 12:43:14 crc kubenswrapper[4784]: I1205 12:43:14.299068 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6697f74bb9-lvd58" Dec 05 12:43:14 crc kubenswrapper[4784]: I1205 12:43:14.304691 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-dxxdd" Dec 05 12:43:14 crc kubenswrapper[4784]: I1205 12:43:14.305364 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Dec 05 12:43:14 crc kubenswrapper[4784]: I1205 12:43:14.305662 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Dec 05 12:43:14 crc kubenswrapper[4784]: I1205 12:43:14.306653 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Dec 05 12:43:14 crc kubenswrapper[4784]: I1205 12:43:14.324418 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6697f74bb9-lvd58"] Dec 05 12:43:14 crc kubenswrapper[4784]: I1205 12:43:14.393232 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-64ff4bc6cc-z9fht"] Dec 05 12:43:14 crc kubenswrapper[4784]: I1205 12:43:14.394559 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-64ff4bc6cc-z9fht" Dec 05 12:43:14 crc kubenswrapper[4784]: I1205 12:43:14.396338 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Dec 05 12:43:14 crc kubenswrapper[4784]: I1205 12:43:14.404012 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-64ff4bc6cc-z9fht"] Dec 05 12:43:14 crc kubenswrapper[4784]: I1205 12:43:14.460721 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gww97\" (UniqueName: \"kubernetes.io/projected/35d88457-3c06-4722-a27b-d343a6b97352-kube-api-access-gww97\") pod \"dnsmasq-dns-6697f74bb9-lvd58\" (UID: \"35d88457-3c06-4722-a27b-d343a6b97352\") " pod="openstack/dnsmasq-dns-6697f74bb9-lvd58" Dec 05 12:43:14 crc kubenswrapper[4784]: I1205 12:43:14.460792 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35d88457-3c06-4722-a27b-d343a6b97352-config\") pod \"dnsmasq-dns-6697f74bb9-lvd58\" (UID: \"35d88457-3c06-4722-a27b-d343a6b97352\") " pod="openstack/dnsmasq-dns-6697f74bb9-lvd58" Dec 05 12:43:14 crc kubenswrapper[4784]: I1205 12:43:14.562500 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4d82c03a-55b1-4e18-b073-a75808dc0d9a-config\") pod \"dnsmasq-dns-64ff4bc6cc-z9fht\" (UID: \"4d82c03a-55b1-4e18-b073-a75808dc0d9a\") " pod="openstack/dnsmasq-dns-64ff4bc6cc-z9fht" Dec 05 12:43:14 crc kubenswrapper[4784]: I1205 12:43:14.562588 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gww97\" (UniqueName: \"kubernetes.io/projected/35d88457-3c06-4722-a27b-d343a6b97352-kube-api-access-gww97\") pod \"dnsmasq-dns-6697f74bb9-lvd58\" (UID: \"35d88457-3c06-4722-a27b-d343a6b97352\") " pod="openstack/dnsmasq-dns-6697f74bb9-lvd58" 
Dec 05 12:43:14 crc kubenswrapper[4784]: I1205 12:43:14.562728 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35d88457-3c06-4722-a27b-d343a6b97352-config\") pod \"dnsmasq-dns-6697f74bb9-lvd58\" (UID: \"35d88457-3c06-4722-a27b-d343a6b97352\") " pod="openstack/dnsmasq-dns-6697f74bb9-lvd58"
Dec 05 12:43:14 crc kubenswrapper[4784]: I1205 12:43:14.562803 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4d82c03a-55b1-4e18-b073-a75808dc0d9a-dns-svc\") pod \"dnsmasq-dns-64ff4bc6cc-z9fht\" (UID: \"4d82c03a-55b1-4e18-b073-a75808dc0d9a\") " pod="openstack/dnsmasq-dns-64ff4bc6cc-z9fht"
Dec 05 12:43:14 crc kubenswrapper[4784]: I1205 12:43:14.562911 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8k2qd\" (UniqueName: \"kubernetes.io/projected/4d82c03a-55b1-4e18-b073-a75808dc0d9a-kube-api-access-8k2qd\") pod \"dnsmasq-dns-64ff4bc6cc-z9fht\" (UID: \"4d82c03a-55b1-4e18-b073-a75808dc0d9a\") " pod="openstack/dnsmasq-dns-64ff4bc6cc-z9fht"
Dec 05 12:43:14 crc kubenswrapper[4784]: I1205 12:43:14.564006 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35d88457-3c06-4722-a27b-d343a6b97352-config\") pod \"dnsmasq-dns-6697f74bb9-lvd58\" (UID: \"35d88457-3c06-4722-a27b-d343a6b97352\") " pod="openstack/dnsmasq-dns-6697f74bb9-lvd58"
Dec 05 12:43:14 crc kubenswrapper[4784]: I1205 12:43:14.584144 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gww97\" (UniqueName: \"kubernetes.io/projected/35d88457-3c06-4722-a27b-d343a6b97352-kube-api-access-gww97\") pod \"dnsmasq-dns-6697f74bb9-lvd58\" (UID: \"35d88457-3c06-4722-a27b-d343a6b97352\") " pod="openstack/dnsmasq-dns-6697f74bb9-lvd58"
Dec 05 12:43:14 crc kubenswrapper[4784]: I1205 12:43:14.621908 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6697f74bb9-lvd58"
Dec 05 12:43:14 crc kubenswrapper[4784]: I1205 12:43:14.664079 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8k2qd\" (UniqueName: \"kubernetes.io/projected/4d82c03a-55b1-4e18-b073-a75808dc0d9a-kube-api-access-8k2qd\") pod \"dnsmasq-dns-64ff4bc6cc-z9fht\" (UID: \"4d82c03a-55b1-4e18-b073-a75808dc0d9a\") " pod="openstack/dnsmasq-dns-64ff4bc6cc-z9fht"
Dec 05 12:43:14 crc kubenswrapper[4784]: I1205 12:43:14.664160 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4d82c03a-55b1-4e18-b073-a75808dc0d9a-config\") pod \"dnsmasq-dns-64ff4bc6cc-z9fht\" (UID: \"4d82c03a-55b1-4e18-b073-a75808dc0d9a\") " pod="openstack/dnsmasq-dns-64ff4bc6cc-z9fht"
Dec 05 12:43:14 crc kubenswrapper[4784]: I1205 12:43:14.664236 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4d82c03a-55b1-4e18-b073-a75808dc0d9a-dns-svc\") pod \"dnsmasq-dns-64ff4bc6cc-z9fht\" (UID: \"4d82c03a-55b1-4e18-b073-a75808dc0d9a\") " pod="openstack/dnsmasq-dns-64ff4bc6cc-z9fht"
Dec 05 12:43:14 crc kubenswrapper[4784]: I1205 12:43:14.664987 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4d82c03a-55b1-4e18-b073-a75808dc0d9a-dns-svc\") pod \"dnsmasq-dns-64ff4bc6cc-z9fht\" (UID: \"4d82c03a-55b1-4e18-b073-a75808dc0d9a\") " pod="openstack/dnsmasq-dns-64ff4bc6cc-z9fht"
Dec 05 12:43:14 crc kubenswrapper[4784]: I1205 12:43:14.665856 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4d82c03a-55b1-4e18-b073-a75808dc0d9a-config\") pod \"dnsmasq-dns-64ff4bc6cc-z9fht\" (UID: \"4d82c03a-55b1-4e18-b073-a75808dc0d9a\") " pod="openstack/dnsmasq-dns-64ff4bc6cc-z9fht"
Dec 05 12:43:14 crc kubenswrapper[4784]: I1205 12:43:14.686048 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8k2qd\" (UniqueName: \"kubernetes.io/projected/4d82c03a-55b1-4e18-b073-a75808dc0d9a-kube-api-access-8k2qd\") pod \"dnsmasq-dns-64ff4bc6cc-z9fht\" (UID: \"4d82c03a-55b1-4e18-b073-a75808dc0d9a\") " pod="openstack/dnsmasq-dns-64ff4bc6cc-z9fht"
Dec 05 12:43:14 crc kubenswrapper[4784]: I1205 12:43:14.709493 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-64ff4bc6cc-z9fht"
Dec 05 12:43:15 crc kubenswrapper[4784]: I1205 12:43:15.137493 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6697f74bb9-lvd58"]
Dec 05 12:43:15 crc kubenswrapper[4784]: I1205 12:43:15.216991 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-64ff4bc6cc-z9fht"]
Dec 05 12:43:15 crc kubenswrapper[4784]: W1205 12:43:15.221905 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4d82c03a_55b1_4e18_b073_a75808dc0d9a.slice/crio-e46b68285247d126eab5ff88a8c2d89a42a39c423839125164d733917beba929 WatchSource:0}: Error finding container e46b68285247d126eab5ff88a8c2d89a42a39c423839125164d733917beba929: Status 404 returned error can't find the container with id e46b68285247d126eab5ff88a8c2d89a42a39c423839125164d733917beba929
Dec 05 12:43:15 crc kubenswrapper[4784]: I1205 12:43:15.494567 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6697f74bb9-lvd58" event={"ID":"35d88457-3c06-4722-a27b-d343a6b97352","Type":"ContainerStarted","Data":"892a85aa1590fc1929e7907e4f3882cb8cc9fbc785b72b80c5c02249354d76c1"}
Dec 05 12:43:15 crc kubenswrapper[4784]: I1205 12:43:15.496172 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-64ff4bc6cc-z9fht" event={"ID":"4d82c03a-55b1-4e18-b073-a75808dc0d9a","Type":"ContainerStarted","Data":"e46b68285247d126eab5ff88a8c2d89a42a39c423839125164d733917beba929"}
Dec 05 12:43:18 crc kubenswrapper[4784]: I1205 12:43:18.213330 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-64ff4bc6cc-z9fht"]
Dec 05 12:43:18 crc kubenswrapper[4784]: I1205 12:43:18.224839 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-86d858c69c-66jcn"]
Dec 05 12:43:18 crc kubenswrapper[4784]: I1205 12:43:18.229687 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86d858c69c-66jcn"
Dec 05 12:43:18 crc kubenswrapper[4784]: I1205 12:43:18.243615 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86d858c69c-66jcn"]
Dec 05 12:43:18 crc kubenswrapper[4784]: I1205 12:43:18.335683 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6bf4da93-86e3-498e-8fe9-e97688dc2479-dns-svc\") pod \"dnsmasq-dns-86d858c69c-66jcn\" (UID: \"6bf4da93-86e3-498e-8fe9-e97688dc2479\") " pod="openstack/dnsmasq-dns-86d858c69c-66jcn"
Dec 05 12:43:18 crc kubenswrapper[4784]: I1205 12:43:18.335750 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kwg5t\" (UniqueName: \"kubernetes.io/projected/6bf4da93-86e3-498e-8fe9-e97688dc2479-kube-api-access-kwg5t\") pod \"dnsmasq-dns-86d858c69c-66jcn\" (UID: \"6bf4da93-86e3-498e-8fe9-e97688dc2479\") " pod="openstack/dnsmasq-dns-86d858c69c-66jcn"
Dec 05 12:43:18 crc kubenswrapper[4784]: I1205 12:43:18.335862 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6bf4da93-86e3-498e-8fe9-e97688dc2479-config\") pod \"dnsmasq-dns-86d858c69c-66jcn\" (UID: \"6bf4da93-86e3-498e-8fe9-e97688dc2479\") " pod="openstack/dnsmasq-dns-86d858c69c-66jcn"
Dec 05 12:43:18 crc kubenswrapper[4784]: I1205 12:43:18.439934 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6bf4da93-86e3-498e-8fe9-e97688dc2479-dns-svc\") pod \"dnsmasq-dns-86d858c69c-66jcn\" (UID: \"6bf4da93-86e3-498e-8fe9-e97688dc2479\") " pod="openstack/dnsmasq-dns-86d858c69c-66jcn"
Dec 05 12:43:18 crc kubenswrapper[4784]: I1205 12:43:18.439994 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kwg5t\" (UniqueName: \"kubernetes.io/projected/6bf4da93-86e3-498e-8fe9-e97688dc2479-kube-api-access-kwg5t\") pod \"dnsmasq-dns-86d858c69c-66jcn\" (UID: \"6bf4da93-86e3-498e-8fe9-e97688dc2479\") " pod="openstack/dnsmasq-dns-86d858c69c-66jcn"
Dec 05 12:43:18 crc kubenswrapper[4784]: I1205 12:43:18.440071 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6bf4da93-86e3-498e-8fe9-e97688dc2479-config\") pod \"dnsmasq-dns-86d858c69c-66jcn\" (UID: \"6bf4da93-86e3-498e-8fe9-e97688dc2479\") " pod="openstack/dnsmasq-dns-86d858c69c-66jcn"
Dec 05 12:43:18 crc kubenswrapper[4784]: I1205 12:43:18.440845 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6bf4da93-86e3-498e-8fe9-e97688dc2479-config\") pod \"dnsmasq-dns-86d858c69c-66jcn\" (UID: \"6bf4da93-86e3-498e-8fe9-e97688dc2479\") " pod="openstack/dnsmasq-dns-86d858c69c-66jcn"
Dec 05 12:43:18 crc kubenswrapper[4784]: I1205 12:43:18.441373 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6bf4da93-86e3-498e-8fe9-e97688dc2479-dns-svc\") pod \"dnsmasq-dns-86d858c69c-66jcn\" (UID: \"6bf4da93-86e3-498e-8fe9-e97688dc2479\") " pod="openstack/dnsmasq-dns-86d858c69c-66jcn"
Dec 05 12:43:18 crc kubenswrapper[4784]: I1205 12:43:18.469370 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kwg5t\" (UniqueName: \"kubernetes.io/projected/6bf4da93-86e3-498e-8fe9-e97688dc2479-kube-api-access-kwg5t\") pod \"dnsmasq-dns-86d858c69c-66jcn\" (UID: \"6bf4da93-86e3-498e-8fe9-e97688dc2479\") " pod="openstack/dnsmasq-dns-86d858c69c-66jcn"
Dec 05 12:43:18 crc kubenswrapper[4784]: I1205 12:43:18.579542 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86d858c69c-66jcn"
Dec 05 12:43:18 crc kubenswrapper[4784]: I1205 12:43:18.600116 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6697f74bb9-lvd58"]
Dec 05 12:43:18 crc kubenswrapper[4784]: I1205 12:43:18.628833 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-786b66f8cc-7bp78"]
Dec 05 12:43:18 crc kubenswrapper[4784]: I1205 12:43:18.630455 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-786b66f8cc-7bp78"
Dec 05 12:43:18 crc kubenswrapper[4784]: I1205 12:43:18.648100 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f025e17-70a2-4972-88d2-f3bf817479d4-config\") pod \"dnsmasq-dns-786b66f8cc-7bp78\" (UID: \"1f025e17-70a2-4972-88d2-f3bf817479d4\") " pod="openstack/dnsmasq-dns-786b66f8cc-7bp78"
Dec 05 12:43:18 crc kubenswrapper[4784]: I1205 12:43:18.648158 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1f025e17-70a2-4972-88d2-f3bf817479d4-dns-svc\") pod \"dnsmasq-dns-786b66f8cc-7bp78\" (UID: \"1f025e17-70a2-4972-88d2-f3bf817479d4\") " pod="openstack/dnsmasq-dns-786b66f8cc-7bp78"
Dec 05 12:43:18 crc kubenswrapper[4784]: I1205 12:43:18.648245 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4mwjq\" (UniqueName: \"kubernetes.io/projected/1f025e17-70a2-4972-88d2-f3bf817479d4-kube-api-access-4mwjq\") pod \"dnsmasq-dns-786b66f8cc-7bp78\" (UID: \"1f025e17-70a2-4972-88d2-f3bf817479d4\") " pod="openstack/dnsmasq-dns-786b66f8cc-7bp78"
Dec 05 12:43:18 crc kubenswrapper[4784]: I1205 12:43:18.648324 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-786b66f8cc-7bp78"]
Dec 05 12:43:18 crc kubenswrapper[4784]: I1205 12:43:18.749264 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f025e17-70a2-4972-88d2-f3bf817479d4-config\") pod \"dnsmasq-dns-786b66f8cc-7bp78\" (UID: \"1f025e17-70a2-4972-88d2-f3bf817479d4\") " pod="openstack/dnsmasq-dns-786b66f8cc-7bp78"
Dec 05 12:43:18 crc kubenswrapper[4784]: I1205 12:43:18.749317 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1f025e17-70a2-4972-88d2-f3bf817479d4-dns-svc\") pod \"dnsmasq-dns-786b66f8cc-7bp78\" (UID: \"1f025e17-70a2-4972-88d2-f3bf817479d4\") " pod="openstack/dnsmasq-dns-786b66f8cc-7bp78"
Dec 05 12:43:18 crc kubenswrapper[4784]: I1205 12:43:18.749378 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4mwjq\" (UniqueName: \"kubernetes.io/projected/1f025e17-70a2-4972-88d2-f3bf817479d4-kube-api-access-4mwjq\") pod \"dnsmasq-dns-786b66f8cc-7bp78\" (UID: \"1f025e17-70a2-4972-88d2-f3bf817479d4\") " pod="openstack/dnsmasq-dns-786b66f8cc-7bp78"
Dec 05 12:43:18 crc kubenswrapper[4784]: I1205 12:43:18.750603 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f025e17-70a2-4972-88d2-f3bf817479d4-config\") pod \"dnsmasq-dns-786b66f8cc-7bp78\" (UID: \"1f025e17-70a2-4972-88d2-f3bf817479d4\") " pod="openstack/dnsmasq-dns-786b66f8cc-7bp78"
Dec 05 12:43:18 crc kubenswrapper[4784]: I1205 12:43:18.750773 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1f025e17-70a2-4972-88d2-f3bf817479d4-dns-svc\") pod \"dnsmasq-dns-786b66f8cc-7bp78\" (UID: \"1f025e17-70a2-4972-88d2-f3bf817479d4\") " pod="openstack/dnsmasq-dns-786b66f8cc-7bp78"
Dec 05 12:43:18 crc kubenswrapper[4784]: I1205 12:43:18.790921 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4mwjq\" (UniqueName: \"kubernetes.io/projected/1f025e17-70a2-4972-88d2-f3bf817479d4-kube-api-access-4mwjq\") pod \"dnsmasq-dns-786b66f8cc-7bp78\" (UID: \"1f025e17-70a2-4972-88d2-f3bf817479d4\") " pod="openstack/dnsmasq-dns-786b66f8cc-7bp78"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.013117 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-786b66f8cc-7bp78"]
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.014787 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7c44d66bd9-9tmnb"]
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.016107 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c44d66bd9-9tmnb"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.018244 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-786b66f8cc-7bp78"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.035982 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7c44d66bd9-9tmnb"]
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.054818 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c9e507fa-3712-4c8d-92ae-1b66449ec42c-dns-svc\") pod \"dnsmasq-dns-7c44d66bd9-9tmnb\" (UID: \"c9e507fa-3712-4c8d-92ae-1b66449ec42c\") " pod="openstack/dnsmasq-dns-7c44d66bd9-9tmnb"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.054877 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c9e507fa-3712-4c8d-92ae-1b66449ec42c-config\") pod \"dnsmasq-dns-7c44d66bd9-9tmnb\" (UID: \"c9e507fa-3712-4c8d-92ae-1b66449ec42c\") " pod="openstack/dnsmasq-dns-7c44d66bd9-9tmnb"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.054920 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2b6kg\" (UniqueName: \"kubernetes.io/projected/c9e507fa-3712-4c8d-92ae-1b66449ec42c-kube-api-access-2b6kg\") pod \"dnsmasq-dns-7c44d66bd9-9tmnb\" (UID: \"c9e507fa-3712-4c8d-92ae-1b66449ec42c\") " pod="openstack/dnsmasq-dns-7c44d66bd9-9tmnb"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.155603 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c9e507fa-3712-4c8d-92ae-1b66449ec42c-dns-svc\") pod \"dnsmasq-dns-7c44d66bd9-9tmnb\" (UID: \"c9e507fa-3712-4c8d-92ae-1b66449ec42c\") " pod="openstack/dnsmasq-dns-7c44d66bd9-9tmnb"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.155659 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c9e507fa-3712-4c8d-92ae-1b66449ec42c-config\") pod \"dnsmasq-dns-7c44d66bd9-9tmnb\" (UID: \"c9e507fa-3712-4c8d-92ae-1b66449ec42c\") " pod="openstack/dnsmasq-dns-7c44d66bd9-9tmnb"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.155698 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2b6kg\" (UniqueName: \"kubernetes.io/projected/c9e507fa-3712-4c8d-92ae-1b66449ec42c-kube-api-access-2b6kg\") pod \"dnsmasq-dns-7c44d66bd9-9tmnb\" (UID: \"c9e507fa-3712-4c8d-92ae-1b66449ec42c\") " pod="openstack/dnsmasq-dns-7c44d66bd9-9tmnb"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.156758 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c9e507fa-3712-4c8d-92ae-1b66449ec42c-dns-svc\") pod \"dnsmasq-dns-7c44d66bd9-9tmnb\" (UID: \"c9e507fa-3712-4c8d-92ae-1b66449ec42c\") " pod="openstack/dnsmasq-dns-7c44d66bd9-9tmnb"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.157301 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c9e507fa-3712-4c8d-92ae-1b66449ec42c-config\") pod \"dnsmasq-dns-7c44d66bd9-9tmnb\" (UID: \"c9e507fa-3712-4c8d-92ae-1b66449ec42c\") " pod="openstack/dnsmasq-dns-7c44d66bd9-9tmnb"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.178912 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2b6kg\" (UniqueName: \"kubernetes.io/projected/c9e507fa-3712-4c8d-92ae-1b66449ec42c-kube-api-access-2b6kg\") pod \"dnsmasq-dns-7c44d66bd9-9tmnb\" (UID: \"c9e507fa-3712-4c8d-92ae-1b66449ec42c\") " pod="openstack/dnsmasq-dns-7c44d66bd9-9tmnb"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.351653 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86d858c69c-66jcn"]
Dec 05 12:43:19 crc kubenswrapper[4784]: W1205 12:43:19.354395 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6bf4da93_86e3_498e_8fe9_e97688dc2479.slice/crio-c30229b5f153552da7373a88a1e96038c42a8bbc82cfbcba52f0da483bdb3d96 WatchSource:0}: Error finding container c30229b5f153552da7373a88a1e96038c42a8bbc82cfbcba52f0da483bdb3d96: Status 404 returned error can't find the container with id c30229b5f153552da7373a88a1e96038c42a8bbc82cfbcba52f0da483bdb3d96
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.354599 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c44d66bd9-9tmnb"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.540249 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86d858c69c-66jcn" event={"ID":"6bf4da93-86e3-498e-8fe9-e97688dc2479","Type":"ContainerStarted","Data":"c30229b5f153552da7373a88a1e96038c42a8bbc82cfbcba52f0da483bdb3d96"}
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.578092 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-786b66f8cc-7bp78"]
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.780688 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.782262 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.785414 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.785588 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.785711 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.785811 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.785965 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.786198 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.787475 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-zlsh7"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.792027 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7c44d66bd9-9tmnb"]
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.797519 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.856871 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.858072 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.861320 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.861365 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.861400 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.861503 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-2h67b"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.861546 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.861579 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.861830 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.876560 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e9889e9e-8ec4-44aa-a829-327920ab827f-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e9889e9e-8ec4-44aa-a829-327920ab827f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.877883 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e9889e9e-8ec4-44aa-a829-327920ab827f-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"e9889e9e-8ec4-44aa-a829-327920ab827f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.877937 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"e9889e9e-8ec4-44aa-a829-327920ab827f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.877957 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cv5vh\" (UniqueName: \"kubernetes.io/projected/e9889e9e-8ec4-44aa-a829-327920ab827f-kube-api-access-cv5vh\") pod \"rabbitmq-cell1-server-0\" (UID: \"e9889e9e-8ec4-44aa-a829-327920ab827f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.877986 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e9889e9e-8ec4-44aa-a829-327920ab827f-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"e9889e9e-8ec4-44aa-a829-327920ab827f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.878004 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e9889e9e-8ec4-44aa-a829-327920ab827f-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"e9889e9e-8ec4-44aa-a829-327920ab827f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.878021 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e9889e9e-8ec4-44aa-a829-327920ab827f-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"e9889e9e-8ec4-44aa-a829-327920ab827f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.878041 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e9889e9e-8ec4-44aa-a829-327920ab827f-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"e9889e9e-8ec4-44aa-a829-327920ab827f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.878062 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e9889e9e-8ec4-44aa-a829-327920ab827f-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e9889e9e-8ec4-44aa-a829-327920ab827f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.878084 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e9889e9e-8ec4-44aa-a829-327920ab827f-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"e9889e9e-8ec4-44aa-a829-327920ab827f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.878341 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e9889e9e-8ec4-44aa-a829-327920ab827f-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"e9889e9e-8ec4-44aa-a829-327920ab827f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.890267 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.979446 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"e9889e9e-8ec4-44aa-a829-327920ab827f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.979496 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cv5vh\" (UniqueName: \"kubernetes.io/projected/e9889e9e-8ec4-44aa-a829-327920ab827f-kube-api-access-cv5vh\") pod \"rabbitmq-cell1-server-0\" (UID: \"e9889e9e-8ec4-44aa-a829-327920ab827f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.979536 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e9889e9e-8ec4-44aa-a829-327920ab827f-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"e9889e9e-8ec4-44aa-a829-327920ab827f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.979561 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-pod-info\") pod \"rabbitmq-server-0\" (UID: \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.979583 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e9889e9e-8ec4-44aa-a829-327920ab827f-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"e9889e9e-8ec4-44aa-a829-327920ab827f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.979607 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e9889e9e-8ec4-44aa-a829-327920ab827f-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"e9889e9e-8ec4-44aa-a829-327920ab827f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.979846 4784 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"e9889e9e-8ec4-44aa-a829-327920ab827f\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.979632 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.980732 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e9889e9e-8ec4-44aa-a829-327920ab827f-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"e9889e9e-8ec4-44aa-a829-327920ab827f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.980756 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-server-conf\") pod \"rabbitmq-server-0\" (UID: \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.980782 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.980807 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e9889e9e-8ec4-44aa-a829-327920ab827f-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e9889e9e-8ec4-44aa-a829-327920ab827f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.980835 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e9889e9e-8ec4-44aa-a829-327920ab827f-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"e9889e9e-8ec4-44aa-a829-327920ab827f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.980873 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-config-data\") pod \"rabbitmq-server-0\" (UID: \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.980904 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dvp96\" (UniqueName: \"kubernetes.io/projected/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-kube-api-access-dvp96\") pod \"rabbitmq-server-0\" (UID: \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.980935 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.980963 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.980992 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.981521 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e9889e9e-8ec4-44aa-a829-327920ab827f-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"e9889e9e-8ec4-44aa-a829-327920ab827f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.981804 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.981861 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e9889e9e-8ec4-44aa-a829-327920ab827f-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e9889e9e-8ec4-44aa-a829-327920ab827f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.981897 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e9889e9e-8ec4-44aa-a829-327920ab827f-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"e9889e9e-8ec4-44aa-a829-327920ab827f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.981950 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.985299 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e9889e9e-8ec4-44aa-a829-327920ab827f-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"e9889e9e-8ec4-44aa-a829-327920ab827f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.986500 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e9889e9e-8ec4-44aa-a829-327920ab827f-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"e9889e9e-8ec4-44aa-a829-327920ab827f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.987064 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e9889e9e-8ec4-44aa-a829-327920ab827f-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e9889e9e-8ec4-44aa-a829-327920ab827f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.987111 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e9889e9e-8ec4-44aa-a829-327920ab827f-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"e9889e9e-8ec4-44aa-a829-327920ab827f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.987262 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e9889e9e-8ec4-44aa-a829-327920ab827f-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"e9889e9e-8ec4-44aa-a829-327920ab827f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.987374 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e9889e9e-8ec4-44aa-a829-327920ab827f-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e9889e9e-8ec4-44aa-a829-327920ab827f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.988129 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e9889e9e-8ec4-44aa-a829-327920ab827f-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"e9889e9e-8ec4-44aa-a829-327920ab827f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:43:19 crc kubenswrapper[4784]: I1205 12:43:19.988252 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e9889e9e-8ec4-44aa-a829-327920ab827f-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"e9889e9e-8ec4-44aa-a829-327920ab827f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.008309 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e9889e9e-8ec4-44aa-a829-327920ab827f-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"e9889e9e-8ec4-44aa-a829-327920ab827f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.008452 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cv5vh\" (UniqueName: \"kubernetes.io/projected/e9889e9e-8ec4-44aa-a829-327920ab827f-kube-api-access-cv5vh\") pod \"rabbitmq-cell1-server-0\" (UID: \"e9889e9e-8ec4-44aa-a829-327920ab827f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.023670 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"e9889e9e-8ec4-44aa-a829-327920ab827f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.085339 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dvp96\" (UniqueName: \"kubernetes.io/projected/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-kube-api-access-dvp96\") pod \"rabbitmq-server-0\" (UID: \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.085461 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.085588 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.085658 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.085741 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.085821 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.085893 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-pod-info\") pod \"rabbitmq-server-0\" (UID: \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.085923 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.085970 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-server-conf\") pod \"rabbitmq-server-0\" (UID: \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.086010 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.086074 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-config-data\") pod \"rabbitmq-server-0\" (UID: \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.086065 4784 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/rabbitmq-server-0"
Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.086414 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.087413 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-config-data\") pod \"rabbitmq-server-0\" (UID: \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.088058 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.088385 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-server-conf\") pod \"rabbitmq-server-0\" (UID: \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.088932 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.091462 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-pod-info\") pod \"rabbitmq-server-0\" (UID: \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.091886 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.093967 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.094944 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.101256 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dvp96\" (UniqueName: \"kubernetes.io/projected/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-kube-api-access-dvp96\") pod \"rabbitmq-server-0\" (UID: \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.113615 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\") " pod="openstack/rabbitmq-server-0"
Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.158037 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-notifications-server-0"]
Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.159680 4784 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.163546 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-notifications-default-user" Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.168119 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-notifications-erlang-cookie" Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.168484 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-notifications-server-0"] Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.168655 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-notifications-server-dockercfg-d799d" Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.168763 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-notifications-plugins-conf" Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.168985 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-notifications-server-conf" Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.169316 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-notifications-svc" Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.169490 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-notifications-config-data" Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.190902 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.203676 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.291536 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0a051f14-c8d2-4d57-95a9-9be7c46f9031-rabbitmq-erlang-cookie\") pod \"rabbitmq-notifications-server-0\" (UID: \"0a051f14-c8d2-4d57-95a9-9be7c46f9031\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.291632 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0a051f14-c8d2-4d57-95a9-9be7c46f9031-rabbitmq-confd\") pod \"rabbitmq-notifications-server-0\" (UID: \"0a051f14-c8d2-4d57-95a9-9be7c46f9031\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.291679 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0a051f14-c8d2-4d57-95a9-9be7c46f9031-rabbitmq-tls\") pod \"rabbitmq-notifications-server-0\" (UID: \"0a051f14-c8d2-4d57-95a9-9be7c46f9031\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.291728 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0a051f14-c8d2-4d57-95a9-9be7c46f9031-pod-info\") pod \"rabbitmq-notifications-server-0\" (UID: \"0a051f14-c8d2-4d57-95a9-9be7c46f9031\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.291757 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0a051f14-c8d2-4d57-95a9-9be7c46f9031-erlang-cookie-secret\") pod \"rabbitmq-notifications-server-0\" (UID: \"0a051f14-c8d2-4d57-95a9-9be7c46f9031\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.291780 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0a051f14-c8d2-4d57-95a9-9be7c46f9031-rabbitmq-plugins\") pod \"rabbitmq-notifications-server-0\" (UID: \"0a051f14-c8d2-4d57-95a9-9be7c46f9031\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.291803 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0a051f14-c8d2-4d57-95a9-9be7c46f9031-plugins-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"0a051f14-c8d2-4d57-95a9-9be7c46f9031\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.291922 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-notifications-server-0\" (UID: \"0a051f14-c8d2-4d57-95a9-9be7c46f9031\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.291964 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" 
(UniqueName: \"kubernetes.io/configmap/0a051f14-c8d2-4d57-95a9-9be7c46f9031-server-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"0a051f14-c8d2-4d57-95a9-9be7c46f9031\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.291989 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0a051f14-c8d2-4d57-95a9-9be7c46f9031-config-data\") pod \"rabbitmq-notifications-server-0\" (UID: \"0a051f14-c8d2-4d57-95a9-9be7c46f9031\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.292069 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5rg6d\" (UniqueName: \"kubernetes.io/projected/0a051f14-c8d2-4d57-95a9-9be7c46f9031-kube-api-access-5rg6d\") pod \"rabbitmq-notifications-server-0\" (UID: \"0a051f14-c8d2-4d57-95a9-9be7c46f9031\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.392955 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0a051f14-c8d2-4d57-95a9-9be7c46f9031-rabbitmq-tls\") pod \"rabbitmq-notifications-server-0\" (UID: \"0a051f14-c8d2-4d57-95a9-9be7c46f9031\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.393042 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0a051f14-c8d2-4d57-95a9-9be7c46f9031-pod-info\") pod \"rabbitmq-notifications-server-0\" (UID: \"0a051f14-c8d2-4d57-95a9-9be7c46f9031\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.393076 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0a051f14-c8d2-4d57-95a9-9be7c46f9031-erlang-cookie-secret\") pod \"rabbitmq-notifications-server-0\" (UID: \"0a051f14-c8d2-4d57-95a9-9be7c46f9031\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.393104 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0a051f14-c8d2-4d57-95a9-9be7c46f9031-rabbitmq-plugins\") pod \"rabbitmq-notifications-server-0\" (UID: \"0a051f14-c8d2-4d57-95a9-9be7c46f9031\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.393128 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0a051f14-c8d2-4d57-95a9-9be7c46f9031-plugins-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"0a051f14-c8d2-4d57-95a9-9be7c46f9031\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.393154 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-notifications-server-0\" (UID: \"0a051f14-c8d2-4d57-95a9-9be7c46f9031\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.393208 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"server-conf\" (UniqueName: \"kubernetes.io/configmap/0a051f14-c8d2-4d57-95a9-9be7c46f9031-server-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"0a051f14-c8d2-4d57-95a9-9be7c46f9031\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.393457 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0a051f14-c8d2-4d57-95a9-9be7c46f9031-config-data\") pod \"rabbitmq-notifications-server-0\" (UID: \"0a051f14-c8d2-4d57-95a9-9be7c46f9031\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.393500 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5rg6d\" (UniqueName: \"kubernetes.io/projected/0a051f14-c8d2-4d57-95a9-9be7c46f9031-kube-api-access-5rg6d\") pod \"rabbitmq-notifications-server-0\" (UID: \"0a051f14-c8d2-4d57-95a9-9be7c46f9031\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.393577 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0a051f14-c8d2-4d57-95a9-9be7c46f9031-rabbitmq-erlang-cookie\") pod \"rabbitmq-notifications-server-0\" (UID: \"0a051f14-c8d2-4d57-95a9-9be7c46f9031\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.393614 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0a051f14-c8d2-4d57-95a9-9be7c46f9031-rabbitmq-confd\") pod \"rabbitmq-notifications-server-0\" (UID: \"0a051f14-c8d2-4d57-95a9-9be7c46f9031\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.393645 4784 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-notifications-server-0\" (UID: \"0a051f14-c8d2-4d57-95a9-9be7c46f9031\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.394138 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0a051f14-c8d2-4d57-95a9-9be7c46f9031-plugins-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"0a051f14-c8d2-4d57-95a9-9be7c46f9031\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.394768 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0a051f14-c8d2-4d57-95a9-9be7c46f9031-rabbitmq-erlang-cookie\") pod \"rabbitmq-notifications-server-0\" (UID: \"0a051f14-c8d2-4d57-95a9-9be7c46f9031\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.395388 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0a051f14-c8d2-4d57-95a9-9be7c46f9031-server-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"0a051f14-c8d2-4d57-95a9-9be7c46f9031\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.396087 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0a051f14-c8d2-4d57-95a9-9be7c46f9031-config-data\") pod \"rabbitmq-notifications-server-0\" (UID: \"0a051f14-c8d2-4d57-95a9-9be7c46f9031\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.396364 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0a051f14-c8d2-4d57-95a9-9be7c46f9031-rabbitmq-plugins\") pod \"rabbitmq-notifications-server-0\" (UID: \"0a051f14-c8d2-4d57-95a9-9be7c46f9031\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.402091 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0a051f14-c8d2-4d57-95a9-9be7c46f9031-pod-info\") pod \"rabbitmq-notifications-server-0\" (UID: \"0a051f14-c8d2-4d57-95a9-9be7c46f9031\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.409912 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0a051f14-c8d2-4d57-95a9-9be7c46f9031-rabbitmq-tls\") pod \"rabbitmq-notifications-server-0\" (UID: \"0a051f14-c8d2-4d57-95a9-9be7c46f9031\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.410456 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0a051f14-c8d2-4d57-95a9-9be7c46f9031-rabbitmq-confd\") pod \"rabbitmq-notifications-server-0\" (UID: \"0a051f14-c8d2-4d57-95a9-9be7c46f9031\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.422323 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0a051f14-c8d2-4d57-95a9-9be7c46f9031-erlang-cookie-secret\") pod \"rabbitmq-notifications-server-0\" (UID: \"0a051f14-c8d2-4d57-95a9-9be7c46f9031\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.423124 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5rg6d\" (UniqueName: \"kubernetes.io/projected/0a051f14-c8d2-4d57-95a9-9be7c46f9031-kube-api-access-5rg6d\") pod \"rabbitmq-notifications-server-0\" (UID: \"0a051f14-c8d2-4d57-95a9-9be7c46f9031\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.432674 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-notifications-server-0\" (UID: \"0a051f14-c8d2-4d57-95a9-9be7c46f9031\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.492029 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.557791 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c44d66bd9-9tmnb" event={"ID":"c9e507fa-3712-4c8d-92ae-1b66449ec42c","Type":"ContainerStarted","Data":"6cbf8ffed71c0a618bfae84d3b3706c03918773712d26445ab17969afe84c2ac"} Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.570817 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-786b66f8cc-7bp78" event={"ID":"1f025e17-70a2-4972-88d2-f3bf817479d4","Type":"ContainerStarted","Data":"52f387481be45563aef08301ef9dc4f28f076f108c9d6600076cc3b5953fef9f"} Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.738773 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 12:43:20 crc kubenswrapper[4784]: W1205 12:43:20.753465 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb9fd9278_0aaf_4bf3_8753_f21a2fe15f3b.slice/crio-f855e2c926d456e2afb4acedb0b5783ce2a787ab6a2f06dcbcb51b447ecf68e6 WatchSource:0}: Error finding container f855e2c926d456e2afb4acedb0b5783ce2a787ab6a2f06dcbcb51b447ecf68e6: Status 404 returned error can't find the container with id f855e2c926d456e2afb4acedb0b5783ce2a787ab6a2f06dcbcb51b447ecf68e6 Dec 05 12:43:20 crc kubenswrapper[4784]: I1205 12:43:20.887980 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 12:43:21 crc kubenswrapper[4784]: I1205 12:43:21.166397 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-notifications-server-0"] Dec 05 12:43:21 crc kubenswrapper[4784]: I1205 12:43:21.599925 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e9889e9e-8ec4-44aa-a829-327920ab827f","Type":"ContainerStarted","Data":"49af8e3875e1ff9d2041e2c3c8c3c2983967017b0e17cf9854be4f9767d78587"} Dec 05 12:43:21 crc kubenswrapper[4784]: I1205 12:43:21.601217 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-notifications-server-0" event={"ID":"0a051f14-c8d2-4d57-95a9-9be7c46f9031","Type":"ContainerStarted","Data":"6a617cd7926c330407004f439d0a8666b447573165bbfd4a1d1bd35fbd348e29"} Dec 05 12:43:21 crc kubenswrapper[4784]: I1205 12:43:21.603519 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b","Type":"ContainerStarted","Data":"f855e2c926d456e2afb4acedb0b5783ce2a787ab6a2f06dcbcb51b447ecf68e6"} Dec 05 12:43:21 crc kubenswrapper[4784]: I1205 12:43:21.853215 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Dec 05 12:43:21 crc kubenswrapper[4784]: I1205 12:43:21.855280 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Dec 05 12:43:21 crc kubenswrapper[4784]: I1205 12:43:21.865403 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Dec 05 12:43:21 crc kubenswrapper[4784]: I1205 12:43:21.866673 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-gvzbw" Dec 05 12:43:21 crc kubenswrapper[4784]: I1205 12:43:21.866828 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Dec 05 12:43:21 crc kubenswrapper[4784]: I1205 12:43:21.869082 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Dec 05 12:43:21 crc kubenswrapper[4784]: I1205 12:43:21.883596 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Dec 05 12:43:21 crc kubenswrapper[4784]: I1205 12:43:21.890197 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 05 12:43:22 crc kubenswrapper[4784]: I1205 12:43:22.036453 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"2142f1ca-e4be-48fc-94b9-12d5f7737366\") " pod="openstack/openstack-galera-0" Dec 05 12:43:22 crc kubenswrapper[4784]: I1205 12:43:22.036619 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/2142f1ca-e4be-48fc-94b9-12d5f7737366-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"2142f1ca-e4be-48fc-94b9-12d5f7737366\") " pod="openstack/openstack-galera-0" Dec 05 12:43:22 crc kubenswrapper[4784]: I1205 12:43:22.037653 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2142f1ca-e4be-48fc-94b9-12d5f7737366-kolla-config\") pod \"openstack-galera-0\" (UID: \"2142f1ca-e4be-48fc-94b9-12d5f7737366\") " pod="openstack/openstack-galera-0" Dec 05 12:43:22 crc kubenswrapper[4784]: I1205 12:43:22.037747 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/2142f1ca-e4be-48fc-94b9-12d5f7737366-config-data-default\") pod \"openstack-galera-0\" (UID: \"2142f1ca-e4be-48fc-94b9-12d5f7737366\") " pod="openstack/openstack-galera-0" Dec 05 12:43:22 crc kubenswrapper[4784]: I1205 12:43:22.037969 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jlkgk\" (UniqueName: \"kubernetes.io/projected/2142f1ca-e4be-48fc-94b9-12d5f7737366-kube-api-access-jlkgk\") pod \"openstack-galera-0\" (UID: \"2142f1ca-e4be-48fc-94b9-12d5f7737366\") " pod="openstack/openstack-galera-0" Dec 05 12:43:22 crc kubenswrapper[4784]: I1205 12:43:22.038053 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2142f1ca-e4be-48fc-94b9-12d5f7737366-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"2142f1ca-e4be-48fc-94b9-12d5f7737366\") " pod="openstack/openstack-galera-0" Dec 05 12:43:22 crc kubenswrapper[4784]: I1205 12:43:22.038096 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/2142f1ca-e4be-48fc-94b9-12d5f7737366-config-data-generated\") pod \"openstack-galera-0\" (UID: \"2142f1ca-e4be-48fc-94b9-12d5f7737366\") " pod="openstack/openstack-galera-0" Dec 05 12:43:22 crc kubenswrapper[4784]: I1205 12:43:22.038174 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2142f1ca-e4be-48fc-94b9-12d5f7737366-operator-scripts\") pod \"openstack-galera-0\" (UID: \"2142f1ca-e4be-48fc-94b9-12d5f7737366\") " pod="openstack/openstack-galera-0" Dec 05 12:43:22 crc kubenswrapper[4784]: I1205 12:43:22.140462 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"2142f1ca-e4be-48fc-94b9-12d5f7737366\") " pod="openstack/openstack-galera-0" Dec 05 12:43:22 crc kubenswrapper[4784]: I1205 12:43:22.140597 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/2142f1ca-e4be-48fc-94b9-12d5f7737366-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"2142f1ca-e4be-48fc-94b9-12d5f7737366\") " pod="openstack/openstack-galera-0" Dec 05 12:43:22 crc kubenswrapper[4784]: I1205 12:43:22.141158 4784 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"2142f1ca-e4be-48fc-94b9-12d5f7737366\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/openstack-galera-0" Dec 05 12:43:22 crc kubenswrapper[4784]: I1205 12:43:22.141444 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2142f1ca-e4be-48fc-94b9-12d5f7737366-kolla-config\") pod \"openstack-galera-0\" (UID: \"2142f1ca-e4be-48fc-94b9-12d5f7737366\") " pod="openstack/openstack-galera-0" Dec 05 12:43:22 crc kubenswrapper[4784]: I1205 12:43:22.141471 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/2142f1ca-e4be-48fc-94b9-12d5f7737366-config-data-default\") pod \"openstack-galera-0\" (UID: \"2142f1ca-e4be-48fc-94b9-12d5f7737366\") " pod="openstack/openstack-galera-0" Dec 05 12:43:22 crc kubenswrapper[4784]: I1205 12:43:22.141577 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jlkgk\" (UniqueName: \"kubernetes.io/projected/2142f1ca-e4be-48fc-94b9-12d5f7737366-kube-api-access-jlkgk\") pod \"openstack-galera-0\" (UID: \"2142f1ca-e4be-48fc-94b9-12d5f7737366\") " pod="openstack/openstack-galera-0" Dec 05 12:43:22 crc kubenswrapper[4784]: I1205 12:43:22.141622 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2142f1ca-e4be-48fc-94b9-12d5f7737366-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"2142f1ca-e4be-48fc-94b9-12d5f7737366\") " pod="openstack/openstack-galera-0" Dec 05 12:43:22 crc kubenswrapper[4784]: I1205 12:43:22.141644 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/2142f1ca-e4be-48fc-94b9-12d5f7737366-config-data-generated\") pod \"openstack-galera-0\" (UID: 
\"2142f1ca-e4be-48fc-94b9-12d5f7737366\") " pod="openstack/openstack-galera-0" Dec 05 12:43:22 crc kubenswrapper[4784]: I1205 12:43:22.141969 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2142f1ca-e4be-48fc-94b9-12d5f7737366-operator-scripts\") pod \"openstack-galera-0\" (UID: \"2142f1ca-e4be-48fc-94b9-12d5f7737366\") " pod="openstack/openstack-galera-0" Dec 05 12:43:22 crc kubenswrapper[4784]: I1205 12:43:22.142309 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2142f1ca-e4be-48fc-94b9-12d5f7737366-kolla-config\") pod \"openstack-galera-0\" (UID: \"2142f1ca-e4be-48fc-94b9-12d5f7737366\") " pod="openstack/openstack-galera-0" Dec 05 12:43:22 crc kubenswrapper[4784]: I1205 12:43:22.143761 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/2142f1ca-e4be-48fc-94b9-12d5f7737366-config-data-generated\") pod \"openstack-galera-0\" (UID: \"2142f1ca-e4be-48fc-94b9-12d5f7737366\") " pod="openstack/openstack-galera-0" Dec 05 12:43:22 crc kubenswrapper[4784]: I1205 12:43:22.147247 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2142f1ca-e4be-48fc-94b9-12d5f7737366-operator-scripts\") pod \"openstack-galera-0\" (UID: \"2142f1ca-e4be-48fc-94b9-12d5f7737366\") " pod="openstack/openstack-galera-0" Dec 05 12:43:22 crc kubenswrapper[4784]: I1205 12:43:22.150040 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/2142f1ca-e4be-48fc-94b9-12d5f7737366-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"2142f1ca-e4be-48fc-94b9-12d5f7737366\") " pod="openstack/openstack-galera-0" Dec 05 12:43:22 crc kubenswrapper[4784]: I1205 12:43:22.150403 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/2142f1ca-e4be-48fc-94b9-12d5f7737366-config-data-default\") pod \"openstack-galera-0\" (UID: \"2142f1ca-e4be-48fc-94b9-12d5f7737366\") " pod="openstack/openstack-galera-0" Dec 05 12:43:22 crc kubenswrapper[4784]: I1205 12:43:22.155305 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2142f1ca-e4be-48fc-94b9-12d5f7737366-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"2142f1ca-e4be-48fc-94b9-12d5f7737366\") " pod="openstack/openstack-galera-0" Dec 05 12:43:22 crc kubenswrapper[4784]: I1205 12:43:22.161587 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jlkgk\" (UniqueName: \"kubernetes.io/projected/2142f1ca-e4be-48fc-94b9-12d5f7737366-kube-api-access-jlkgk\") pod \"openstack-galera-0\" (UID: \"2142f1ca-e4be-48fc-94b9-12d5f7737366\") " pod="openstack/openstack-galera-0" Dec 05 12:43:22 crc kubenswrapper[4784]: I1205 12:43:22.189682 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"2142f1ca-e4be-48fc-94b9-12d5f7737366\") " pod="openstack/openstack-galera-0" Dec 05 12:43:22 crc kubenswrapper[4784]: I1205 12:43:22.195278 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.279903 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.281497 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.283179 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-2vwb6" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.283536 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.283568 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.286005 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.288721 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.477695 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/62eaeb31-76a0-4f2b-9bbe-b00f25a620e3-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"62eaeb31-76a0-4f2b-9bbe-b00f25a620e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.477799 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62eaeb31-76a0-4f2b-9bbe-b00f25a620e3-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"62eaeb31-76a0-4f2b-9bbe-b00f25a620e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.477823 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/62eaeb31-76a0-4f2b-9bbe-b00f25a620e3-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"62eaeb31-76a0-4f2b-9bbe-b00f25a620e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.477853 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/62eaeb31-76a0-4f2b-9bbe-b00f25a620e3-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"62eaeb31-76a0-4f2b-9bbe-b00f25a620e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.477917 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pnbmc\" (UniqueName: \"kubernetes.io/projected/62eaeb31-76a0-4f2b-9bbe-b00f25a620e3-kube-api-access-pnbmc\") pod \"openstack-cell1-galera-0\" (UID: \"62eaeb31-76a0-4f2b-9bbe-b00f25a620e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.477957 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/62eaeb31-76a0-4f2b-9bbe-b00f25a620e3-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"62eaeb31-76a0-4f2b-9bbe-b00f25a620e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.478031 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/62eaeb31-76a0-4f2b-9bbe-b00f25a620e3-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"62eaeb31-76a0-4f2b-9bbe-b00f25a620e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.478054 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-cell1-galera-0\" (UID: \"62eaeb31-76a0-4f2b-9bbe-b00f25a620e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.581015 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/62eaeb31-76a0-4f2b-9bbe-b00f25a620e3-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"62eaeb31-76a0-4f2b-9bbe-b00f25a620e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.581083 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pnbmc\" (UniqueName: \"kubernetes.io/projected/62eaeb31-76a0-4f2b-9bbe-b00f25a620e3-kube-api-access-pnbmc\") pod \"openstack-cell1-galera-0\" (UID: \"62eaeb31-76a0-4f2b-9bbe-b00f25a620e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.581103 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/62eaeb31-76a0-4f2b-9bbe-b00f25a620e3-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"62eaeb31-76a0-4f2b-9bbe-b00f25a620e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.581153 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/62eaeb31-76a0-4f2b-9bbe-b00f25a620e3-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"62eaeb31-76a0-4f2b-9bbe-b00f25a620e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.581176 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-cell1-galera-0\" (UID: \"62eaeb31-76a0-4f2b-9bbe-b00f25a620e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.581225 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/62eaeb31-76a0-4f2b-9bbe-b00f25a620e3-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"62eaeb31-76a0-4f2b-9bbe-b00f25a620e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.581262 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/62eaeb31-76a0-4f2b-9bbe-b00f25a620e3-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"62eaeb31-76a0-4f2b-9bbe-b00f25a620e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.581284 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/62eaeb31-76a0-4f2b-9bbe-b00f25a620e3-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"62eaeb31-76a0-4f2b-9bbe-b00f25a620e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.581947 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/62eaeb31-76a0-4f2b-9bbe-b00f25a620e3-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"62eaeb31-76a0-4f2b-9bbe-b00f25a620e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.582962 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/62eaeb31-76a0-4f2b-9bbe-b00f25a620e3-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"62eaeb31-76a0-4f2b-9bbe-b00f25a620e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.583789 4784 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-cell1-galera-0\" (UID: \"62eaeb31-76a0-4f2b-9bbe-b00f25a620e3\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/openstack-cell1-galera-0" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.592904 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/62eaeb31-76a0-4f2b-9bbe-b00f25a620e3-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"62eaeb31-76a0-4f2b-9bbe-b00f25a620e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.594464 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/62eaeb31-76a0-4f2b-9bbe-b00f25a620e3-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"62eaeb31-76a0-4f2b-9bbe-b00f25a620e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.599671 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/62eaeb31-76a0-4f2b-9bbe-b00f25a620e3-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"62eaeb31-76a0-4f2b-9bbe-b00f25a620e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.600797 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62eaeb31-76a0-4f2b-9bbe-b00f25a620e3-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"62eaeb31-76a0-4f2b-9bbe-b00f25a620e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.606755 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pnbmc\" (UniqueName: \"kubernetes.io/projected/62eaeb31-76a0-4f2b-9bbe-b00f25a620e3-kube-api-access-pnbmc\") pod \"openstack-cell1-galera-0\" (UID: 
\"62eaeb31-76a0-4f2b-9bbe-b00f25a620e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.632357 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-cell1-galera-0\" (UID: \"62eaeb31-76a0-4f2b-9bbe-b00f25a620e3\") " pod="openstack/openstack-cell1-galera-0" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.787163 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.788530 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.797085 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.797303 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-qwqvw" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.797382 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.816836 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.886932 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/a93486d9-05bf-44e5-9991-5ca89f117938-memcached-tls-certs\") pod \"memcached-0\" (UID: \"a93486d9-05bf-44e5-9991-5ca89f117938\") " pod="openstack/memcached-0" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.887015 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a93486d9-05bf-44e5-9991-5ca89f117938-combined-ca-bundle\") pod \"memcached-0\" (UID: \"a93486d9-05bf-44e5-9991-5ca89f117938\") " pod="openstack/memcached-0" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.887038 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/a93486d9-05bf-44e5-9991-5ca89f117938-kolla-config\") pod \"memcached-0\" (UID: \"a93486d9-05bf-44e5-9991-5ca89f117938\") " pod="openstack/memcached-0" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.887068 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jjghh\" (UniqueName: \"kubernetes.io/projected/a93486d9-05bf-44e5-9991-5ca89f117938-kube-api-access-jjghh\") pod \"memcached-0\" (UID: \"a93486d9-05bf-44e5-9991-5ca89f117938\") " pod="openstack/memcached-0" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.887116 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a93486d9-05bf-44e5-9991-5ca89f117938-config-data\") pod \"memcached-0\" (UID: \"a93486d9-05bf-44e5-9991-5ca89f117938\") " pod="openstack/memcached-0" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.899326 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.988670 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/a93486d9-05bf-44e5-9991-5ca89f117938-kolla-config\") pod \"memcached-0\" (UID: \"a93486d9-05bf-44e5-9991-5ca89f117938\") " pod="openstack/memcached-0" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.988724 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jjghh\" (UniqueName: \"kubernetes.io/projected/a93486d9-05bf-44e5-9991-5ca89f117938-kube-api-access-jjghh\") pod \"memcached-0\" (UID: \"a93486d9-05bf-44e5-9991-5ca89f117938\") " pod="openstack/memcached-0" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.988776 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a93486d9-05bf-44e5-9991-5ca89f117938-config-data\") pod \"memcached-0\" (UID: \"a93486d9-05bf-44e5-9991-5ca89f117938\") " pod="openstack/memcached-0" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.988812 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/a93486d9-05bf-44e5-9991-5ca89f117938-memcached-tls-certs\") pod \"memcached-0\" (UID: \"a93486d9-05bf-44e5-9991-5ca89f117938\") " pod="openstack/memcached-0" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.988875 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a93486d9-05bf-44e5-9991-5ca89f117938-combined-ca-bundle\") pod \"memcached-0\" (UID: \"a93486d9-05bf-44e5-9991-5ca89f117938\") " pod="openstack/memcached-0" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.990282 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/a93486d9-05bf-44e5-9991-5ca89f117938-kolla-config\") pod \"memcached-0\" (UID: \"a93486d9-05bf-44e5-9991-5ca89f117938\") " pod="openstack/memcached-0" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.990454 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a93486d9-05bf-44e5-9991-5ca89f117938-config-data\") pod \"memcached-0\" (UID: \"a93486d9-05bf-44e5-9991-5ca89f117938\") " pod="openstack/memcached-0" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.995931 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a93486d9-05bf-44e5-9991-5ca89f117938-combined-ca-bundle\") pod \"memcached-0\" (UID: \"a93486d9-05bf-44e5-9991-5ca89f117938\") " pod="openstack/memcached-0" Dec 05 12:43:23 crc kubenswrapper[4784]: I1205 12:43:23.996331 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/a93486d9-05bf-44e5-9991-5ca89f117938-memcached-tls-certs\") pod \"memcached-0\" (UID: \"a93486d9-05bf-44e5-9991-5ca89f117938\") " pod="openstack/memcached-0" Dec 05 12:43:24 crc kubenswrapper[4784]: I1205 12:43:24.004794 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jjghh\" (UniqueName: \"kubernetes.io/projected/a93486d9-05bf-44e5-9991-5ca89f117938-kube-api-access-jjghh\") pod \"memcached-0\" (UID: 
\"a93486d9-05bf-44e5-9991-5ca89f117938\") " pod="openstack/memcached-0" Dec 05 12:43:24 crc kubenswrapper[4784]: I1205 12:43:24.129730 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 05 12:43:25 crc kubenswrapper[4784]: I1205 12:43:25.683511 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 12:43:25 crc kubenswrapper[4784]: I1205 12:43:25.692377 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 12:43:25 crc kubenswrapper[4784]: I1205 12:43:25.694674 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-2svz8" Dec 05 12:43:25 crc kubenswrapper[4784]: I1205 12:43:25.700696 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 12:43:25 crc kubenswrapper[4784]: I1205 12:43:25.841048 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6kw8x\" (UniqueName: \"kubernetes.io/projected/28e48eb5-7362-46c6-8b58-d1d5f1a111c9-kube-api-access-6kw8x\") pod \"kube-state-metrics-0\" (UID: \"28e48eb5-7362-46c6-8b58-d1d5f1a111c9\") " pod="openstack/kube-state-metrics-0" Dec 05 12:43:25 crc kubenswrapper[4784]: I1205 12:43:25.943257 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6kw8x\" (UniqueName: \"kubernetes.io/projected/28e48eb5-7362-46c6-8b58-d1d5f1a111c9-kube-api-access-6kw8x\") pod \"kube-state-metrics-0\" (UID: \"28e48eb5-7362-46c6-8b58-d1d5f1a111c9\") " pod="openstack/kube-state-metrics-0" Dec 05 12:43:25 crc kubenswrapper[4784]: I1205 12:43:25.983017 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6kw8x\" (UniqueName: \"kubernetes.io/projected/28e48eb5-7362-46c6-8b58-d1d5f1a111c9-kube-api-access-6kw8x\") pod \"kube-state-metrics-0\" (UID: \"28e48eb5-7362-46c6-8b58-d1d5f1a111c9\") " pod="openstack/kube-state-metrics-0" Dec 05 12:43:26 crc kubenswrapper[4784]: I1205 12:43:26.039562 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 12:43:27 crc kubenswrapper[4784]: I1205 12:43:27.033472 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 05 12:43:27 crc kubenswrapper[4784]: I1205 12:43:27.035840 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 05 12:43:27 crc kubenswrapper[4784]: I1205 12:43:27.035947 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 05 12:43:27 crc kubenswrapper[4784]: I1205 12:43:27.039134 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-mphlc" Dec 05 12:43:27 crc kubenswrapper[4784]: I1205 12:43:27.039341 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Dec 05 12:43:27 crc kubenswrapper[4784]: I1205 12:43:27.039561 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Dec 05 12:43:27 crc kubenswrapper[4784]: I1205 12:43:27.039707 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Dec 05 12:43:27 crc kubenswrapper[4784]: I1205 12:43:27.040430 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Dec 05 12:43:27 crc kubenswrapper[4784]: I1205 12:43:27.044771 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Dec 05 12:43:27 crc kubenswrapper[4784]: I1205 12:43:27.163587 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/80b02221-f73a-4535-b2d9-c203e5de2061-config\") pod \"prometheus-metric-storage-0\" (UID: \"80b02221-f73a-4535-b2d9-c203e5de2061\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:43:27 crc kubenswrapper[4784]: I1205 12:43:27.163900 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/80b02221-f73a-4535-b2d9-c203e5de2061-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"80b02221-f73a-4535-b2d9-c203e5de2061\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:43:27 crc kubenswrapper[4784]: I1205 12:43:27.163933 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4c2bf\" (UniqueName: \"kubernetes.io/projected/80b02221-f73a-4535-b2d9-c203e5de2061-kube-api-access-4c2bf\") pod \"prometheus-metric-storage-0\" (UID: \"80b02221-f73a-4535-b2d9-c203e5de2061\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:43:27 crc kubenswrapper[4784]: I1205 12:43:27.163987 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-04a1f8eb-a1ee-49a8-9ddb-41c4006574fd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-04a1f8eb-a1ee-49a8-9ddb-41c4006574fd\") pod \"prometheus-metric-storage-0\" (UID: \"80b02221-f73a-4535-b2d9-c203e5de2061\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:43:27 crc kubenswrapper[4784]: I1205 12:43:27.164018 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/80b02221-f73a-4535-b2d9-c203e5de2061-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"80b02221-f73a-4535-b2d9-c203e5de2061\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:43:27 crc kubenswrapper[4784]: I1205 12:43:27.164039 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/80b02221-f73a-4535-b2d9-c203e5de2061-prometheus-metric-storage-rulefiles-0\") 
pod \"prometheus-metric-storage-0\" (UID: \"80b02221-f73a-4535-b2d9-c203e5de2061\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:43:27 crc kubenswrapper[4784]: I1205 12:43:27.164057 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/80b02221-f73a-4535-b2d9-c203e5de2061-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"80b02221-f73a-4535-b2d9-c203e5de2061\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:43:27 crc kubenswrapper[4784]: I1205 12:43:27.164073 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/80b02221-f73a-4535-b2d9-c203e5de2061-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"80b02221-f73a-4535-b2d9-c203e5de2061\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:43:27 crc kubenswrapper[4784]: I1205 12:43:27.268392 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/80b02221-f73a-4535-b2d9-c203e5de2061-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"80b02221-f73a-4535-b2d9-c203e5de2061\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:43:27 crc kubenswrapper[4784]: I1205 12:43:27.268493 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4c2bf\" (UniqueName: \"kubernetes.io/projected/80b02221-f73a-4535-b2d9-c203e5de2061-kube-api-access-4c2bf\") pod \"prometheus-metric-storage-0\" (UID: \"80b02221-f73a-4535-b2d9-c203e5de2061\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:43:27 crc kubenswrapper[4784]: I1205 12:43:27.268584 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-04a1f8eb-a1ee-49a8-9ddb-41c4006574fd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-04a1f8eb-a1ee-49a8-9ddb-41c4006574fd\") pod \"prometheus-metric-storage-0\" (UID: \"80b02221-f73a-4535-b2d9-c203e5de2061\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:43:27 crc kubenswrapper[4784]: I1205 12:43:27.268661 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/80b02221-f73a-4535-b2d9-c203e5de2061-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"80b02221-f73a-4535-b2d9-c203e5de2061\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:43:27 crc kubenswrapper[4784]: I1205 12:43:27.268723 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/80b02221-f73a-4535-b2d9-c203e5de2061-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"80b02221-f73a-4535-b2d9-c203e5de2061\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:43:27 crc kubenswrapper[4784]: I1205 12:43:27.268747 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/80b02221-f73a-4535-b2d9-c203e5de2061-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"80b02221-f73a-4535-b2d9-c203e5de2061\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:43:27 crc kubenswrapper[4784]: I1205 12:43:27.268791 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" 
(UniqueName: \"kubernetes.io/secret/80b02221-f73a-4535-b2d9-c203e5de2061-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"80b02221-f73a-4535-b2d9-c203e5de2061\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:43:27 crc kubenswrapper[4784]: I1205 12:43:27.268845 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/80b02221-f73a-4535-b2d9-c203e5de2061-config\") pod \"prometheus-metric-storage-0\" (UID: \"80b02221-f73a-4535-b2d9-c203e5de2061\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:43:27 crc kubenswrapper[4784]: I1205 12:43:27.270005 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/80b02221-f73a-4535-b2d9-c203e5de2061-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"80b02221-f73a-4535-b2d9-c203e5de2061\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:43:27 crc kubenswrapper[4784]: I1205 12:43:27.276011 4784 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 05 12:43:27 crc kubenswrapper[4784]: I1205 12:43:27.276049 4784 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-04a1f8eb-a1ee-49a8-9ddb-41c4006574fd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-04a1f8eb-a1ee-49a8-9ddb-41c4006574fd\") pod \"prometheus-metric-storage-0\" (UID: \"80b02221-f73a-4535-b2d9-c203e5de2061\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/b62a06a2697edf91f726f31fa2b1cc522cd33435aa7b897a0891f96c3d70ee18/globalmount\"" pod="openstack/prometheus-metric-storage-0" Dec 05 12:43:27 crc kubenswrapper[4784]: I1205 12:43:27.282210 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/80b02221-f73a-4535-b2d9-c203e5de2061-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"80b02221-f73a-4535-b2d9-c203e5de2061\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:43:27 crc kubenswrapper[4784]: I1205 12:43:27.282600 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/80b02221-f73a-4535-b2d9-c203e5de2061-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"80b02221-f73a-4535-b2d9-c203e5de2061\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:43:27 crc kubenswrapper[4784]: I1205 12:43:27.284224 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/80b02221-f73a-4535-b2d9-c203e5de2061-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"80b02221-f73a-4535-b2d9-c203e5de2061\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:43:27 crc kubenswrapper[4784]: I1205 12:43:27.287741 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/80b02221-f73a-4535-b2d9-c203e5de2061-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"80b02221-f73a-4535-b2d9-c203e5de2061\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:43:27 crc kubenswrapper[4784]: I1205 12:43:27.288054 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/secret/80b02221-f73a-4535-b2d9-c203e5de2061-config\") pod \"prometheus-metric-storage-0\" (UID: \"80b02221-f73a-4535-b2d9-c203e5de2061\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:43:27 crc kubenswrapper[4784]: I1205 12:43:27.288438 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4c2bf\" (UniqueName: \"kubernetes.io/projected/80b02221-f73a-4535-b2d9-c203e5de2061-kube-api-access-4c2bf\") pod \"prometheus-metric-storage-0\" (UID: \"80b02221-f73a-4535-b2d9-c203e5de2061\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:43:27 crc kubenswrapper[4784]: I1205 12:43:27.354392 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-04a1f8eb-a1ee-49a8-9ddb-41c4006574fd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-04a1f8eb-a1ee-49a8-9ddb-41c4006574fd\") pod \"prometheus-metric-storage-0\" (UID: \"80b02221-f73a-4535-b2d9-c203e5de2061\") " pod="openstack/prometheus-metric-storage-0" Dec 05 12:43:27 crc kubenswrapper[4784]: I1205 12:43:27.363934 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.529899 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.533028 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.535565 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-4dgm7" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.536468 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.537074 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.537859 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.538875 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.543756 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.709335 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3af95a38-f2ad-44f7-a99d-77d48faa79f8-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"3af95a38-f2ad-44f7-a99d-77d48faa79f8\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.709386 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3af95a38-f2ad-44f7-a99d-77d48faa79f8-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"3af95a38-f2ad-44f7-a99d-77d48faa79f8\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.709432 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: 
\"kubernetes.io/empty-dir/3af95a38-f2ad-44f7-a99d-77d48faa79f8-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"3af95a38-f2ad-44f7-a99d-77d48faa79f8\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.709460 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3af95a38-f2ad-44f7-a99d-77d48faa79f8-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"3af95a38-f2ad-44f7-a99d-77d48faa79f8\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.709480 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tpjj5\" (UniqueName: \"kubernetes.io/projected/3af95a38-f2ad-44f7-a99d-77d48faa79f8-kube-api-access-tpjj5\") pod \"ovsdbserver-nb-0\" (UID: \"3af95a38-f2ad-44f7-a99d-77d48faa79f8\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.709526 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3af95a38-f2ad-44f7-a99d-77d48faa79f8-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"3af95a38-f2ad-44f7-a99d-77d48faa79f8\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.709599 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3af95a38-f2ad-44f7-a99d-77d48faa79f8-config\") pod \"ovsdbserver-nb-0\" (UID: \"3af95a38-f2ad-44f7-a99d-77d48faa79f8\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.709705 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"3af95a38-f2ad-44f7-a99d-77d48faa79f8\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.787018 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ljc2l"] Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.788402 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ljc2l" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.796697 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.796986 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-6tnwx" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.797228 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.799932 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-6s6n5"] Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.802141 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-6s6n5" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.813645 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3af95a38-f2ad-44f7-a99d-77d48faa79f8-config\") pod \"ovsdbserver-nb-0\" (UID: \"3af95a38-f2ad-44f7-a99d-77d48faa79f8\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.813735 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"3af95a38-f2ad-44f7-a99d-77d48faa79f8\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.813792 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3af95a38-f2ad-44f7-a99d-77d48faa79f8-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"3af95a38-f2ad-44f7-a99d-77d48faa79f8\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.813824 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3af95a38-f2ad-44f7-a99d-77d48faa79f8-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"3af95a38-f2ad-44f7-a99d-77d48faa79f8\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.813885 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3af95a38-f2ad-44f7-a99d-77d48faa79f8-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"3af95a38-f2ad-44f7-a99d-77d48faa79f8\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.813931 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3af95a38-f2ad-44f7-a99d-77d48faa79f8-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"3af95a38-f2ad-44f7-a99d-77d48faa79f8\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.813968 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tpjj5\" (UniqueName: \"kubernetes.io/projected/3af95a38-f2ad-44f7-a99d-77d48faa79f8-kube-api-access-tpjj5\") pod \"ovsdbserver-nb-0\" (UID: \"3af95a38-f2ad-44f7-a99d-77d48faa79f8\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.814038 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3af95a38-f2ad-44f7-a99d-77d48faa79f8-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"3af95a38-f2ad-44f7-a99d-77d48faa79f8\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.815864 4784 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"3af95a38-f2ad-44f7-a99d-77d48faa79f8\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/ovsdbserver-nb-0" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.827791 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ljc2l"] Dec 05 12:43:29 crc 
kubenswrapper[4784]: I1205 12:43:29.899175 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-6s6n5"] Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.915522 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8762021a-ee57-4a56-b752-da1d808ca0ff-scripts\") pod \"ovn-controller-ovs-6s6n5\" (UID: \"8762021a-ee57-4a56-b752-da1d808ca0ff\") " pod="openstack/ovn-controller-ovs-6s6n5" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.915572 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/8762021a-ee57-4a56-b752-da1d808ca0ff-var-log\") pod \"ovn-controller-ovs-6s6n5\" (UID: \"8762021a-ee57-4a56-b752-da1d808ca0ff\") " pod="openstack/ovn-controller-ovs-6s6n5" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.915600 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/8762021a-ee57-4a56-b752-da1d808ca0ff-var-lib\") pod \"ovn-controller-ovs-6s6n5\" (UID: \"8762021a-ee57-4a56-b752-da1d808ca0ff\") " pod="openstack/ovn-controller-ovs-6s6n5" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.915818 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/8762021a-ee57-4a56-b752-da1d808ca0ff-etc-ovs\") pod \"ovn-controller-ovs-6s6n5\" (UID: \"8762021a-ee57-4a56-b752-da1d808ca0ff\") " pod="openstack/ovn-controller-ovs-6s6n5" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.916075 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/938888bc-6cef-410e-b517-9fdb0c824405-ovn-controller-tls-certs\") pod \"ovn-controller-ljc2l\" (UID: \"938888bc-6cef-410e-b517-9fdb0c824405\") " pod="openstack/ovn-controller-ljc2l" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.916124 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/938888bc-6cef-410e-b517-9fdb0c824405-combined-ca-bundle\") pod \"ovn-controller-ljc2l\" (UID: \"938888bc-6cef-410e-b517-9fdb0c824405\") " pod="openstack/ovn-controller-ljc2l" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.916239 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/938888bc-6cef-410e-b517-9fdb0c824405-var-run-ovn\") pod \"ovn-controller-ljc2l\" (UID: \"938888bc-6cef-410e-b517-9fdb0c824405\") " pod="openstack/ovn-controller-ljc2l" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.916272 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/938888bc-6cef-410e-b517-9fdb0c824405-var-log-ovn\") pod \"ovn-controller-ljc2l\" (UID: \"938888bc-6cef-410e-b517-9fdb0c824405\") " pod="openstack/ovn-controller-ljc2l" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.916289 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tzdz5\" (UniqueName: \"kubernetes.io/projected/938888bc-6cef-410e-b517-9fdb0c824405-kube-api-access-tzdz5\") 
pod \"ovn-controller-ljc2l\" (UID: \"938888bc-6cef-410e-b517-9fdb0c824405\") " pod="openstack/ovn-controller-ljc2l" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.916402 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l722w\" (UniqueName: \"kubernetes.io/projected/8762021a-ee57-4a56-b752-da1d808ca0ff-kube-api-access-l722w\") pod \"ovn-controller-ovs-6s6n5\" (UID: \"8762021a-ee57-4a56-b752-da1d808ca0ff\") " pod="openstack/ovn-controller-ovs-6s6n5" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.916436 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8762021a-ee57-4a56-b752-da1d808ca0ff-var-run\") pod \"ovn-controller-ovs-6s6n5\" (UID: \"8762021a-ee57-4a56-b752-da1d808ca0ff\") " pod="openstack/ovn-controller-ovs-6s6n5" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.916463 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/938888bc-6cef-410e-b517-9fdb0c824405-scripts\") pod \"ovn-controller-ljc2l\" (UID: \"938888bc-6cef-410e-b517-9fdb0c824405\") " pod="openstack/ovn-controller-ljc2l" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.916491 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/938888bc-6cef-410e-b517-9fdb0c824405-var-run\") pod \"ovn-controller-ljc2l\" (UID: \"938888bc-6cef-410e-b517-9fdb0c824405\") " pod="openstack/ovn-controller-ljc2l" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.936871 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3af95a38-f2ad-44f7-a99d-77d48faa79f8-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"3af95a38-f2ad-44f7-a99d-77d48faa79f8\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.936973 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3af95a38-f2ad-44f7-a99d-77d48faa79f8-config\") pod \"ovsdbserver-nb-0\" (UID: \"3af95a38-f2ad-44f7-a99d-77d48faa79f8\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.937070 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3af95a38-f2ad-44f7-a99d-77d48faa79f8-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"3af95a38-f2ad-44f7-a99d-77d48faa79f8\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.942020 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tpjj5\" (UniqueName: \"kubernetes.io/projected/3af95a38-f2ad-44f7-a99d-77d48faa79f8-kube-api-access-tpjj5\") pod \"ovsdbserver-nb-0\" (UID: \"3af95a38-f2ad-44f7-a99d-77d48faa79f8\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.943314 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3af95a38-f2ad-44f7-a99d-77d48faa79f8-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"3af95a38-f2ad-44f7-a99d-77d48faa79f8\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.946315 4784 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"3af95a38-f2ad-44f7-a99d-77d48faa79f8\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.946674 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3af95a38-f2ad-44f7-a99d-77d48faa79f8-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"3af95a38-f2ad-44f7-a99d-77d48faa79f8\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:43:29 crc kubenswrapper[4784]: I1205 12:43:29.953484 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3af95a38-f2ad-44f7-a99d-77d48faa79f8-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"3af95a38-f2ad-44f7-a99d-77d48faa79f8\") " pod="openstack/ovsdbserver-nb-0" Dec 05 12:43:30 crc kubenswrapper[4784]: I1205 12:43:30.018197 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l722w\" (UniqueName: \"kubernetes.io/projected/8762021a-ee57-4a56-b752-da1d808ca0ff-kube-api-access-l722w\") pod \"ovn-controller-ovs-6s6n5\" (UID: \"8762021a-ee57-4a56-b752-da1d808ca0ff\") " pod="openstack/ovn-controller-ovs-6s6n5" Dec 05 12:43:30 crc kubenswrapper[4784]: I1205 12:43:30.018242 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8762021a-ee57-4a56-b752-da1d808ca0ff-var-run\") pod \"ovn-controller-ovs-6s6n5\" (UID: \"8762021a-ee57-4a56-b752-da1d808ca0ff\") " pod="openstack/ovn-controller-ovs-6s6n5" Dec 05 12:43:30 crc kubenswrapper[4784]: I1205 12:43:30.018266 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/938888bc-6cef-410e-b517-9fdb0c824405-scripts\") pod \"ovn-controller-ljc2l\" (UID: \"938888bc-6cef-410e-b517-9fdb0c824405\") " pod="openstack/ovn-controller-ljc2l" Dec 05 12:43:30 crc kubenswrapper[4784]: I1205 12:43:30.018293 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/938888bc-6cef-410e-b517-9fdb0c824405-var-run\") pod \"ovn-controller-ljc2l\" (UID: \"938888bc-6cef-410e-b517-9fdb0c824405\") " pod="openstack/ovn-controller-ljc2l" Dec 05 12:43:30 crc kubenswrapper[4784]: I1205 12:43:30.018324 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8762021a-ee57-4a56-b752-da1d808ca0ff-scripts\") pod \"ovn-controller-ovs-6s6n5\" (UID: \"8762021a-ee57-4a56-b752-da1d808ca0ff\") " pod="openstack/ovn-controller-ovs-6s6n5" Dec 05 12:43:30 crc kubenswrapper[4784]: I1205 12:43:30.018345 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/8762021a-ee57-4a56-b752-da1d808ca0ff-var-log\") pod \"ovn-controller-ovs-6s6n5\" (UID: \"8762021a-ee57-4a56-b752-da1d808ca0ff\") " pod="openstack/ovn-controller-ovs-6s6n5" Dec 05 12:43:30 crc kubenswrapper[4784]: I1205 12:43:30.018364 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/8762021a-ee57-4a56-b752-da1d808ca0ff-var-lib\") pod \"ovn-controller-ovs-6s6n5\" (UID: \"8762021a-ee57-4a56-b752-da1d808ca0ff\") " pod="openstack/ovn-controller-ovs-6s6n5" 
Dec 05 12:43:30 crc kubenswrapper[4784]: I1205 12:43:30.018437 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/8762021a-ee57-4a56-b752-da1d808ca0ff-etc-ovs\") pod \"ovn-controller-ovs-6s6n5\" (UID: \"8762021a-ee57-4a56-b752-da1d808ca0ff\") " pod="openstack/ovn-controller-ovs-6s6n5"
Dec 05 12:43:30 crc kubenswrapper[4784]: I1205 12:43:30.018459 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/938888bc-6cef-410e-b517-9fdb0c824405-ovn-controller-tls-certs\") pod \"ovn-controller-ljc2l\" (UID: \"938888bc-6cef-410e-b517-9fdb0c824405\") " pod="openstack/ovn-controller-ljc2l"
Dec 05 12:43:30 crc kubenswrapper[4784]: I1205 12:43:30.018480 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/938888bc-6cef-410e-b517-9fdb0c824405-combined-ca-bundle\") pod \"ovn-controller-ljc2l\" (UID: \"938888bc-6cef-410e-b517-9fdb0c824405\") " pod="openstack/ovn-controller-ljc2l"
Dec 05 12:43:30 crc kubenswrapper[4784]: I1205 12:43:30.018513 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/938888bc-6cef-410e-b517-9fdb0c824405-var-run-ovn\") pod \"ovn-controller-ljc2l\" (UID: \"938888bc-6cef-410e-b517-9fdb0c824405\") " pod="openstack/ovn-controller-ljc2l"
Dec 05 12:43:30 crc kubenswrapper[4784]: I1205 12:43:30.018532 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/938888bc-6cef-410e-b517-9fdb0c824405-var-log-ovn\") pod \"ovn-controller-ljc2l\" (UID: \"938888bc-6cef-410e-b517-9fdb0c824405\") " pod="openstack/ovn-controller-ljc2l"
Dec 05 12:43:30 crc kubenswrapper[4784]: I1205 12:43:30.018548 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tzdz5\" (UniqueName: \"kubernetes.io/projected/938888bc-6cef-410e-b517-9fdb0c824405-kube-api-access-tzdz5\") pod \"ovn-controller-ljc2l\" (UID: \"938888bc-6cef-410e-b517-9fdb0c824405\") " pod="openstack/ovn-controller-ljc2l"
Dec 05 12:43:30 crc kubenswrapper[4784]: I1205 12:43:30.019320 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8762021a-ee57-4a56-b752-da1d808ca0ff-var-run\") pod \"ovn-controller-ovs-6s6n5\" (UID: \"8762021a-ee57-4a56-b752-da1d808ca0ff\") " pod="openstack/ovn-controller-ovs-6s6n5"
Dec 05 12:43:30 crc kubenswrapper[4784]: I1205 12:43:30.019675 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/8762021a-ee57-4a56-b752-da1d808ca0ff-etc-ovs\") pod \"ovn-controller-ovs-6s6n5\" (UID: \"8762021a-ee57-4a56-b752-da1d808ca0ff\") " pod="openstack/ovn-controller-ovs-6s6n5"
Dec 05 12:43:30 crc kubenswrapper[4784]: I1205 12:43:30.021036 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/938888bc-6cef-410e-b517-9fdb0c824405-scripts\") pod \"ovn-controller-ljc2l\" (UID: \"938888bc-6cef-410e-b517-9fdb0c824405\") " pod="openstack/ovn-controller-ljc2l"
Dec 05 12:43:30 crc kubenswrapper[4784]: I1205 12:43:30.021120 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/938888bc-6cef-410e-b517-9fdb0c824405-var-log-ovn\") pod \"ovn-controller-ljc2l\" (UID: \"938888bc-6cef-410e-b517-9fdb0c824405\") " pod="openstack/ovn-controller-ljc2l"
Dec 05 12:43:30 crc kubenswrapper[4784]: I1205 12:43:30.021433 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/938888bc-6cef-410e-b517-9fdb0c824405-var-run\") pod \"ovn-controller-ljc2l\" (UID: \"938888bc-6cef-410e-b517-9fdb0c824405\") " pod="openstack/ovn-controller-ljc2l"
Dec 05 12:43:30 crc kubenswrapper[4784]: I1205 12:43:30.021475 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/8762021a-ee57-4a56-b752-da1d808ca0ff-var-lib\") pod \"ovn-controller-ovs-6s6n5\" (UID: \"8762021a-ee57-4a56-b752-da1d808ca0ff\") " pod="openstack/ovn-controller-ovs-6s6n5"
Dec 05 12:43:30 crc kubenswrapper[4784]: I1205 12:43:30.021509 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/938888bc-6cef-410e-b517-9fdb0c824405-var-run-ovn\") pod \"ovn-controller-ljc2l\" (UID: \"938888bc-6cef-410e-b517-9fdb0c824405\") " pod="openstack/ovn-controller-ljc2l"
Dec 05 12:43:30 crc kubenswrapper[4784]: I1205 12:43:30.021825 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/8762021a-ee57-4a56-b752-da1d808ca0ff-var-log\") pod \"ovn-controller-ovs-6s6n5\" (UID: \"8762021a-ee57-4a56-b752-da1d808ca0ff\") " pod="openstack/ovn-controller-ovs-6s6n5"
Dec 05 12:43:30 crc kubenswrapper[4784]: I1205 12:43:30.023941 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8762021a-ee57-4a56-b752-da1d808ca0ff-scripts\") pod \"ovn-controller-ovs-6s6n5\" (UID: \"8762021a-ee57-4a56-b752-da1d808ca0ff\") " pod="openstack/ovn-controller-ovs-6s6n5"
Dec 05 12:43:30 crc kubenswrapper[4784]: I1205 12:43:30.032505 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/938888bc-6cef-410e-b517-9fdb0c824405-ovn-controller-tls-certs\") pod \"ovn-controller-ljc2l\" (UID: \"938888bc-6cef-410e-b517-9fdb0c824405\") " pod="openstack/ovn-controller-ljc2l"
Dec 05 12:43:30 crc kubenswrapper[4784]: I1205 12:43:30.033244 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/938888bc-6cef-410e-b517-9fdb0c824405-combined-ca-bundle\") pod \"ovn-controller-ljc2l\" (UID: \"938888bc-6cef-410e-b517-9fdb0c824405\") " pod="openstack/ovn-controller-ljc2l"
Dec 05 12:43:30 crc kubenswrapper[4784]: I1205 12:43:30.035802 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l722w\" (UniqueName: \"kubernetes.io/projected/8762021a-ee57-4a56-b752-da1d808ca0ff-kube-api-access-l722w\") pod \"ovn-controller-ovs-6s6n5\" (UID: \"8762021a-ee57-4a56-b752-da1d808ca0ff\") " pod="openstack/ovn-controller-ovs-6s6n5"
Dec 05 12:43:30 crc kubenswrapper[4784]: I1205 12:43:30.036805 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tzdz5\" (UniqueName: \"kubernetes.io/projected/938888bc-6cef-410e-b517-9fdb0c824405-kube-api-access-tzdz5\") pod \"ovn-controller-ljc2l\" (UID: \"938888bc-6cef-410e-b517-9fdb0c824405\") " pod="openstack/ovn-controller-ljc2l"
Dec 05 12:43:30 crc kubenswrapper[4784]: I1205 12:43:30.117859 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ljc2l"
Dec 05 12:43:30 crc kubenswrapper[4784]: I1205 12:43:30.129993 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-6s6n5"
Dec 05 12:43:30 crc kubenswrapper[4784]: I1205 12:43:30.236945 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0"
Dec 05 12:43:33 crc kubenswrapper[4784]: I1205 12:43:33.570903 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"]
Dec 05 12:43:33 crc kubenswrapper[4784]: I1205 12:43:33.573230 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0"
Dec 05 12:43:33 crc kubenswrapper[4784]: I1205 12:43:33.576776 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs"
Dec 05 12:43:33 crc kubenswrapper[4784]: I1205 12:43:33.577031 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-x9lwx"
Dec 05 12:43:33 crc kubenswrapper[4784]: I1205 12:43:33.577280 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts"
Dec 05 12:43:33 crc kubenswrapper[4784]: I1205 12:43:33.577401 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config"
Dec 05 12:43:33 crc kubenswrapper[4784]: I1205 12:43:33.579880 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"]
Dec 05 12:43:33 crc kubenswrapper[4784]: I1205 12:43:33.702751 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/de4ea412-229e-4e53-97ff-86a923c47aac-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"de4ea412-229e-4e53-97ff-86a923c47aac\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 12:43:33 crc kubenswrapper[4784]: I1205 12:43:33.702937 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de4ea412-229e-4e53-97ff-86a923c47aac-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"de4ea412-229e-4e53-97ff-86a923c47aac\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 12:43:33 crc kubenswrapper[4784]: I1205 12:43:33.702961 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/de4ea412-229e-4e53-97ff-86a923c47aac-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"de4ea412-229e-4e53-97ff-86a923c47aac\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 12:43:33 crc kubenswrapper[4784]: I1205 12:43:33.703010 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5smrh\" (UniqueName: \"kubernetes.io/projected/de4ea412-229e-4e53-97ff-86a923c47aac-kube-api-access-5smrh\") pod \"ovsdbserver-sb-0\" (UID: \"de4ea412-229e-4e53-97ff-86a923c47aac\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 12:43:33 crc kubenswrapper[4784]: I1205 12:43:33.703153 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de4ea412-229e-4e53-97ff-86a923c47aac-config\") pod \"ovsdbserver-sb-0\" (UID: \"de4ea412-229e-4e53-97ff-86a923c47aac\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 12:43:33 crc kubenswrapper[4784]: I1205 12:43:33.703266 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-sb-0\" (UID: \"de4ea412-229e-4e53-97ff-86a923c47aac\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 12:43:33 crc kubenswrapper[4784]: I1205 12:43:33.703294 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/de4ea412-229e-4e53-97ff-86a923c47aac-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"de4ea412-229e-4e53-97ff-86a923c47aac\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 12:43:33 crc kubenswrapper[4784]: I1205 12:43:33.703314 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/de4ea412-229e-4e53-97ff-86a923c47aac-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"de4ea412-229e-4e53-97ff-86a923c47aac\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 12:43:33 crc kubenswrapper[4784]: I1205 12:43:33.807227 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5smrh\" (UniqueName: \"kubernetes.io/projected/de4ea412-229e-4e53-97ff-86a923c47aac-kube-api-access-5smrh\") pod \"ovsdbserver-sb-0\" (UID: \"de4ea412-229e-4e53-97ff-86a923c47aac\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 12:43:33 crc kubenswrapper[4784]: I1205 12:43:33.807304 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de4ea412-229e-4e53-97ff-86a923c47aac-config\") pod \"ovsdbserver-sb-0\" (UID: \"de4ea412-229e-4e53-97ff-86a923c47aac\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 12:43:33 crc kubenswrapper[4784]: I1205 12:43:33.807689 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-sb-0\" (UID: \"de4ea412-229e-4e53-97ff-86a923c47aac\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 12:43:33 crc kubenswrapper[4784]: I1205 12:43:33.807713 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/de4ea412-229e-4e53-97ff-86a923c47aac-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"de4ea412-229e-4e53-97ff-86a923c47aac\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 12:43:33 crc kubenswrapper[4784]: I1205 12:43:33.807992 4784 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-sb-0\" (UID: \"de4ea412-229e-4e53-97ff-86a923c47aac\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/ovsdbserver-sb-0"
Dec 05 12:43:33 crc kubenswrapper[4784]: I1205 12:43:33.808123 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/de4ea412-229e-4e53-97ff-86a923c47aac-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"de4ea412-229e-4e53-97ff-86a923c47aac\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 12:43:33 crc kubenswrapper[4784]: I1205 12:43:33.808181 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/de4ea412-229e-4e53-97ff-86a923c47aac-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"de4ea412-229e-4e53-97ff-86a923c47aac\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 12:43:33 crc kubenswrapper[4784]: I1205 12:43:33.808298 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de4ea412-229e-4e53-97ff-86a923c47aac-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"de4ea412-229e-4e53-97ff-86a923c47aac\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 12:43:33 crc kubenswrapper[4784]: I1205 12:43:33.808319 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de4ea412-229e-4e53-97ff-86a923c47aac-config\") pod \"ovsdbserver-sb-0\" (UID: \"de4ea412-229e-4e53-97ff-86a923c47aac\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 12:43:33 crc kubenswrapper[4784]: I1205 12:43:33.808341 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/de4ea412-229e-4e53-97ff-86a923c47aac-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"de4ea412-229e-4e53-97ff-86a923c47aac\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 12:43:33 crc kubenswrapper[4784]: I1205 12:43:33.808882 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/de4ea412-229e-4e53-97ff-86a923c47aac-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"de4ea412-229e-4e53-97ff-86a923c47aac\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 12:43:33 crc kubenswrapper[4784]: I1205 12:43:33.810403 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/de4ea412-229e-4e53-97ff-86a923c47aac-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"de4ea412-229e-4e53-97ff-86a923c47aac\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 12:43:33 crc kubenswrapper[4784]: I1205 12:43:33.815064 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de4ea412-229e-4e53-97ff-86a923c47aac-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"de4ea412-229e-4e53-97ff-86a923c47aac\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 12:43:33 crc kubenswrapper[4784]: I1205 12:43:33.815315 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/de4ea412-229e-4e53-97ff-86a923c47aac-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"de4ea412-229e-4e53-97ff-86a923c47aac\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 12:43:33 crc kubenswrapper[4784]: I1205 12:43:33.830947 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/de4ea412-229e-4e53-97ff-86a923c47aac-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"de4ea412-229e-4e53-97ff-86a923c47aac\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 12:43:33 crc kubenswrapper[4784]: I1205 12:43:33.844930 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5smrh\" (UniqueName: \"kubernetes.io/projected/de4ea412-229e-4e53-97ff-86a923c47aac-kube-api-access-5smrh\") pod \"ovsdbserver-sb-0\" (UID: \"de4ea412-229e-4e53-97ff-86a923c47aac\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 12:43:33 crc kubenswrapper[4784]: I1205 12:43:33.874417 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-sb-0\" (UID: \"de4ea412-229e-4e53-97ff-86a923c47aac\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 12:43:33 crc kubenswrapper[4784]: I1205 12:43:33.920623 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0"
Dec 05 12:43:52 crc kubenswrapper[4784]: E1205 12:43:52.880280 4784 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.151:5001/podified-master-centos10/openstack-rabbitmq:watcher_latest"
Dec 05 12:43:52 crc kubenswrapper[4784]: E1205 12:43:52.880912 4784 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.151:5001/podified-master-centos10/openstack-rabbitmq:watcher_latest"
Dec 05 12:43:52 crc kubenswrapper[4784]: E1205 12:43:52.881092 4784 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:38.102.83.151:5001/podified-master-centos10/openstack-rabbitmq:watcher_latest,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5rg6d,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-notifications-server-0_openstack(0a051f14-c8d2-4d57-95a9-9be7c46f9031): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 05 12:43:52 crc kubenswrapper[4784]: E1205 12:43:52.882655 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-notifications-server-0" podUID="0a051f14-c8d2-4d57-95a9-9be7c46f9031"
Dec 05 12:43:52 crc kubenswrapper[4784]: E1205 12:43:52.903092 4784 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.151:5001/podified-master-centos10/openstack-rabbitmq:watcher_latest"
Dec 05 12:43:52 crc kubenswrapper[4784]: E1205 12:43:52.903140 4784 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.151:5001/podified-master-centos10/openstack-rabbitmq:watcher_latest"
Dec 05 12:43:52 crc kubenswrapper[4784]: E1205 12:43:52.903352 4784 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:38.102.83.151:5001/podified-master-centos10/openstack-rabbitmq:watcher_latest,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dvp96,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-server-0_openstack(b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 05 12:43:52 crc kubenswrapper[4784]: E1205 12:43:52.904571 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-server-0" podUID="b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b"
Dec 05 12:43:52 crc kubenswrapper[4784]: E1205 12:43:52.930680 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.151:5001/podified-master-centos10/openstack-rabbitmq:watcher_latest\\\"\"" pod="openstack/rabbitmq-server-0" podUID="b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b"
Dec 05 12:43:52 crc kubenswrapper[4784]: E1205 12:43:52.930957 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.151:5001/podified-master-centos10/openstack-rabbitmq:watcher_latest\\\"\"" pod="openstack/rabbitmq-notifications-server-0" podUID="0a051f14-c8d2-4d57-95a9-9be7c46f9031"
Dec 05 12:43:53 crc kubenswrapper[4784]: E1205 12:43:53.574779 4784 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.151:5001/podified-master-centos10/openstack-neutron-server:watcher_latest"
Dec 05 12:43:53 crc kubenswrapper[4784]: E1205 12:43:53.575027 4784 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.151:5001/podified-master-centos10/openstack-neutron-server:watcher_latest"
Dec 05 12:43:53 crc kubenswrapper[4784]: E1205 12:43:53.575134 4784 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:38.102.83.151:5001/podified-master-centos10/openstack-neutron-server:watcher_latest,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-gww97,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-6697f74bb9-lvd58_openstack(35d88457-3c06-4722-a27b-d343a6b97352): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 05 12:43:53 crc kubenswrapper[4784]: E1205 12:43:53.576317 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-6697f74bb9-lvd58" podUID="35d88457-3c06-4722-a27b-d343a6b97352"
Dec 05 12:43:53 crc kubenswrapper[4784]: E1205 12:43:53.577474 4784 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.151:5001/podified-master-centos10/openstack-rabbitmq:watcher_latest"
Dec 05 12:43:53 crc kubenswrapper[4784]: E1205 12:43:53.578012 4784 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.151:5001/podified-master-centos10/openstack-rabbitmq:watcher_latest"
Dec 05 12:43:53 crc kubenswrapper[4784]: E1205 12:43:53.578222 4784 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:38.102.83.151:5001/podified-master-centos10/openstack-rabbitmq:watcher_latest,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cv5vh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cell1-server-0_openstack(e9889e9e-8ec4-44aa-a829-327920ab827f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 05 12:43:53 crc kubenswrapper[4784]: E1205 12:43:53.580026 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-cell1-server-0" podUID="e9889e9e-8ec4-44aa-a829-327920ab827f"
Dec 05 12:43:53 crc kubenswrapper[4784]: E1205 12:43:53.627810 4784 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.151:5001/podified-master-centos10/openstack-neutron-server:watcher_latest"
Dec 05 12:43:53 crc kubenswrapper[4784]: E1205 12:43:53.627863 4784 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.151:5001/podified-master-centos10/openstack-neutron-server:watcher_latest"
Dec 05 12:43:53 crc kubenswrapper[4784]: E1205 12:43:53.627969 4784 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:38.102.83.151:5001/podified-master-centos10/openstack-neutron-server:watcher_latest,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4mwjq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-786b66f8cc-7bp78_openstack(1f025e17-70a2-4972-88d2-f3bf817479d4): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 05 12:43:53 crc kubenswrapper[4784]: E1205 12:43:53.629083 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-786b66f8cc-7bp78" podUID="1f025e17-70a2-4972-88d2-f3bf817479d4"
Dec 05 12:43:53 crc kubenswrapper[4784]: E1205 12:43:53.638883 4784 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.151:5001/podified-master-centos10/openstack-neutron-server:watcher_latest"
Dec 05 12:43:53 crc kubenswrapper[4784]: E1205 12:43:53.638935 4784 kuberuntime_image.go:55] "Failed to pull
image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.151:5001/podified-master-centos10/openstack-neutron-server:watcher_latest" Dec 05 12:43:53 crc kubenswrapper[4784]: E1205 12:43:53.639028 4784 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:38.102.83.151:5001/podified-master-centos10/openstack-neutron-server:watcher_latest,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8k2qd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-64ff4bc6cc-z9fht_openstack(4d82c03a-55b1-4e18-b073-a75808dc0d9a): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 12:43:53 crc kubenswrapper[4784]: E1205 12:43:53.640475 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-64ff4bc6cc-z9fht" podUID="4d82c03a-55b1-4e18-b073-a75808dc0d9a" Dec 05 12:43:53 crc kubenswrapper[4784]: E1205 12:43:53.643944 4784 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.151:5001/podified-master-centos10/openstack-neutron-server:watcher_latest" Dec 05 12:43:53 crc kubenswrapper[4784]: E1205 12:43:53.643993 4784 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.151:5001/podified-master-centos10/openstack-neutron-server:watcher_latest" Dec 05 12:43:53 crc kubenswrapper[4784]: 
E1205 12:43:53.644124 4784 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:38.102.83.151:5001/podified-master-centos10/openstack-neutron-server:watcher_latest,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5c7h56dh5cfh8bh54fhbbhf4h5b9hdch67fhd7h55fh55fh6ch9h548h54ch665h647h6h8fhd6h5dfh5cdh58bh577h66fh695h5fbh55h77h5fcq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2b6kg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-7c44d66bd9-9tmnb_openstack(c9e507fa-3712-4c8d-92ae-1b66449ec42c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 12:43:53 crc kubenswrapper[4784]: E1205 12:43:53.645347 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-7c44d66bd9-9tmnb" podUID="c9e507fa-3712-4c8d-92ae-1b66449ec42c" Dec 05 12:43:53 crc kubenswrapper[4784]: E1205 12:43:53.672448 4784 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.151:5001/podified-master-centos10/openstack-neutron-server:watcher_latest" Dec 05 12:43:53 crc kubenswrapper[4784]: E1205 12:43:53.672495 4784 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.151:5001/podified-master-centos10/openstack-neutron-server:watcher_latest" Dec 05 12:43:53 crc kubenswrapper[4784]: E1205 12:43:53.672595 4784 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:init,Image:38.102.83.151:5001/podified-master-centos10/openstack-neutron-server:watcher_latest,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kwg5t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-86d858c69c-66jcn_openstack(6bf4da93-86e3-498e-8fe9-e97688dc2479): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 12:43:53 crc kubenswrapper[4784]: E1205 12:43:53.675410 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-86d858c69c-66jcn" podUID="6bf4da93-86e3-498e-8fe9-e97688dc2479" Dec 05 12:43:53 crc kubenswrapper[4784]: E1205 12:43:53.944768 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.151:5001/podified-master-centos10/openstack-rabbitmq:watcher_latest\\\"\"" pod="openstack/rabbitmq-cell1-server-0" podUID="e9889e9e-8ec4-44aa-a829-327920ab827f" Dec 05 12:43:53 crc kubenswrapper[4784]: E1205 12:43:53.946541 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.151:5001/podified-master-centos10/openstack-neutron-server:watcher_latest\\\"\"" pod="openstack/dnsmasq-dns-86d858c69c-66jcn" podUID="6bf4da93-86e3-498e-8fe9-e97688dc2479" Dec 05 12:43:53 crc kubenswrapper[4784]: E1205 12:43:53.978823 4784 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.151:5001/podified-master-centos10/openstack-neutron-server:watcher_latest\\\"\"" pod="openstack/dnsmasq-dns-7c44d66bd9-9tmnb" podUID="c9e507fa-3712-4c8d-92ae-1b66449ec42c" Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.446260 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-786b66f8cc-7bp78" Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.458335 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6697f74bb9-lvd58" Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.468131 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-64ff4bc6cc-z9fht" Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.519134 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.519625 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4mwjq\" (UniqueName: \"kubernetes.io/projected/1f025e17-70a2-4972-88d2-f3bf817479d4-kube-api-access-4mwjq\") pod \"1f025e17-70a2-4972-88d2-f3bf817479d4\" (UID: \"1f025e17-70a2-4972-88d2-f3bf817479d4\") " Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.519672 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gww97\" (UniqueName: \"kubernetes.io/projected/35d88457-3c06-4722-a27b-d343a6b97352-kube-api-access-gww97\") pod \"35d88457-3c06-4722-a27b-d343a6b97352\" (UID: \"35d88457-3c06-4722-a27b-d343a6b97352\") " Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.519708 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1f025e17-70a2-4972-88d2-f3bf817479d4-dns-svc\") pod \"1f025e17-70a2-4972-88d2-f3bf817479d4\" (UID: \"1f025e17-70a2-4972-88d2-f3bf817479d4\") " Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.519725 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35d88457-3c06-4722-a27b-d343a6b97352-config\") pod \"35d88457-3c06-4722-a27b-d343a6b97352\" (UID: \"35d88457-3c06-4722-a27b-d343a6b97352\") " Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.519767 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8k2qd\" (UniqueName: \"kubernetes.io/projected/4d82c03a-55b1-4e18-b073-a75808dc0d9a-kube-api-access-8k2qd\") pod \"4d82c03a-55b1-4e18-b073-a75808dc0d9a\" (UID: \"4d82c03a-55b1-4e18-b073-a75808dc0d9a\") " Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.519795 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f025e17-70a2-4972-88d2-f3bf817479d4-config\") pod \"1f025e17-70a2-4972-88d2-f3bf817479d4\" (UID: \"1f025e17-70a2-4972-88d2-f3bf817479d4\") " Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.519857 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4d82c03a-55b1-4e18-b073-a75808dc0d9a-dns-svc\") pod \"4d82c03a-55b1-4e18-b073-a75808dc0d9a\" (UID: \"4d82c03a-55b1-4e18-b073-a75808dc0d9a\") " Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.519905 4784 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4d82c03a-55b1-4e18-b073-a75808dc0d9a-config\") pod \"4d82c03a-55b1-4e18-b073-a75808dc0d9a\" (UID: \"4d82c03a-55b1-4e18-b073-a75808dc0d9a\") " Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.520673 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4d82c03a-55b1-4e18-b073-a75808dc0d9a-config" (OuterVolumeSpecName: "config") pod "4d82c03a-55b1-4e18-b073-a75808dc0d9a" (UID: "4d82c03a-55b1-4e18-b073-a75808dc0d9a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.521944 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f025e17-70a2-4972-88d2-f3bf817479d4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1f025e17-70a2-4972-88d2-f3bf817479d4" (UID: "1f025e17-70a2-4972-88d2-f3bf817479d4"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.522501 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35d88457-3c06-4722-a27b-d343a6b97352-config" (OuterVolumeSpecName: "config") pod "35d88457-3c06-4722-a27b-d343a6b97352" (UID: "35d88457-3c06-4722-a27b-d343a6b97352"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.522803 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f025e17-70a2-4972-88d2-f3bf817479d4-config" (OuterVolumeSpecName: "config") pod "1f025e17-70a2-4972-88d2-f3bf817479d4" (UID: "1f025e17-70a2-4972-88d2-f3bf817479d4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.523391 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4d82c03a-55b1-4e18-b073-a75808dc0d9a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "4d82c03a-55b1-4e18-b073-a75808dc0d9a" (UID: "4d82c03a-55b1-4e18-b073-a75808dc0d9a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.525523 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f025e17-70a2-4972-88d2-f3bf817479d4-kube-api-access-4mwjq" (OuterVolumeSpecName: "kube-api-access-4mwjq") pod "1f025e17-70a2-4972-88d2-f3bf817479d4" (UID: "1f025e17-70a2-4972-88d2-f3bf817479d4"). InnerVolumeSpecName "kube-api-access-4mwjq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.526066 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d82c03a-55b1-4e18-b073-a75808dc0d9a-kube-api-access-8k2qd" (OuterVolumeSpecName: "kube-api-access-8k2qd") pod "4d82c03a-55b1-4e18-b073-a75808dc0d9a" (UID: "4d82c03a-55b1-4e18-b073-a75808dc0d9a"). InnerVolumeSpecName "kube-api-access-8k2qd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:43:54 crc kubenswrapper[4784]: W1205 12:43:54.535373 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod62eaeb31_76a0_4f2b_9bbe_b00f25a620e3.slice/crio-244047822b047bdeb5853feb627dc66140d0d87c81b4a6052282dfef674d67c7 WatchSource:0}: Error finding container 244047822b047bdeb5853feb627dc66140d0d87c81b4a6052282dfef674d67c7: Status 404 returned error can't find the container with id 244047822b047bdeb5853feb627dc66140d0d87c81b4a6052282dfef674d67c7 Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.536159 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35d88457-3c06-4722-a27b-d343a6b97352-kube-api-access-gww97" (OuterVolumeSpecName: "kube-api-access-gww97") pod "35d88457-3c06-4722-a27b-d343a6b97352" (UID: "35d88457-3c06-4722-a27b-d343a6b97352"). InnerVolumeSpecName "kube-api-access-gww97". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.544502 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.559272 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.621977 4784 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4d82c03a-55b1-4e18-b073-a75808dc0d9a-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.622014 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4mwjq\" (UniqueName: \"kubernetes.io/projected/1f025e17-70a2-4972-88d2-f3bf817479d4-kube-api-access-4mwjq\") on node \"crc\" DevicePath \"\"" Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.622024 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gww97\" (UniqueName: \"kubernetes.io/projected/35d88457-3c06-4722-a27b-d343a6b97352-kube-api-access-gww97\") on node \"crc\" DevicePath \"\"" Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.622035 4784 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1f025e17-70a2-4972-88d2-f3bf817479d4-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.622043 4784 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35d88457-3c06-4722-a27b-d343a6b97352-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.622052 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8k2qd\" (UniqueName: \"kubernetes.io/projected/4d82c03a-55b1-4e18-b073-a75808dc0d9a-kube-api-access-8k2qd\") on node \"crc\" DevicePath \"\"" Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.622061 4784 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f025e17-70a2-4972-88d2-f3bf817479d4-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.622070 4784 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4d82c03a-55b1-4e18-b073-a75808dc0d9a-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.661681 4784 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.694912 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.752992 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ljc2l"] Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.774424 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 05 12:43:54 crc kubenswrapper[4784]: W1205 12:43:54.807551 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod80b02221_f73a_4535_b2d9_c203e5de2061.slice/crio-2768dc94c73f2ea5bd501291986e189800ce13a06e6aa5d331233853eb3b143c WatchSource:0}: Error finding container 2768dc94c73f2ea5bd501291986e189800ce13a06e6aa5d331233853eb3b143c: Status 404 returned error can't find the container with id 2768dc94c73f2ea5bd501291986e189800ce13a06e6aa5d331233853eb3b143c Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.813056 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.948675 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"de4ea412-229e-4e53-97ff-86a923c47aac","Type":"ContainerStarted","Data":"b7a57ee3e5675929e8d54e483b6a16465105c3e543246311eadfa5eb9e096b36"} Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.950165 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-786b66f8cc-7bp78" event={"ID":"1f025e17-70a2-4972-88d2-f3bf817479d4","Type":"ContainerDied","Data":"52f387481be45563aef08301ef9dc4f28f076f108c9d6600076cc3b5953fef9f"} Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.950208 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-786b66f8cc-7bp78" Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.952375 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"a93486d9-05bf-44e5-9991-5ca89f117938","Type":"ContainerStarted","Data":"7bdb7fb7965db8d8b4e4c46a7fcc79e0790ec2e4a8d35790f8e0a2d48c8649df"} Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.953388 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ljc2l" event={"ID":"938888bc-6cef-410e-b517-9fdb0c824405","Type":"ContainerStarted","Data":"35b0e2c720ec1040c335b0fec2bde1fd7f9058793d8db543aebe1c8a9302c678"} Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.954626 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"62eaeb31-76a0-4f2b-9bbe-b00f25a620e3","Type":"ContainerStarted","Data":"244047822b047bdeb5853feb627dc66140d0d87c81b4a6052282dfef674d67c7"} Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.955912 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"2142f1ca-e4be-48fc-94b9-12d5f7737366","Type":"ContainerStarted","Data":"24d7dc682c8b6335104a9eac4605e12deb556767fbd34eb05c7b01ccce8ca4eb"} Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.957561 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"28e48eb5-7362-46c6-8b58-d1d5f1a111c9","Type":"ContainerStarted","Data":"b359d53fa8c827acd441aea65607d08da9732cf11c084ef406cb64965906373f"} Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.958622 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6697f74bb9-lvd58" event={"ID":"35d88457-3c06-4722-a27b-d343a6b97352","Type":"ContainerDied","Data":"892a85aa1590fc1929e7907e4f3882cb8cc9fbc785b72b80c5c02249354d76c1"} Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.958698 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6697f74bb9-lvd58" Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.966160 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"80b02221-f73a-4535-b2d9-c203e5de2061","Type":"ContainerStarted","Data":"2768dc94c73f2ea5bd501291986e189800ce13a06e6aa5d331233853eb3b143c"} Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.967764 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-64ff4bc6cc-z9fht" Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.967804 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-64ff4bc6cc-z9fht" event={"ID":"4d82c03a-55b1-4e18-b073-a75808dc0d9a","Type":"ContainerDied","Data":"e46b68285247d126eab5ff88a8c2d89a42a39c423839125164d733917beba929"} Dec 05 12:43:54 crc kubenswrapper[4784]: I1205 12:43:54.968928 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"3af95a38-f2ad-44f7-a99d-77d48faa79f8","Type":"ContainerStarted","Data":"8f1fd41cdf632b45bc19ea0f86e17ce30b479d242a364d9787a943a908348bd1"} Dec 05 12:43:55 crc kubenswrapper[4784]: I1205 12:43:55.015710 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-786b66f8cc-7bp78"] Dec 05 12:43:55 crc kubenswrapper[4784]: I1205 12:43:55.024638 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-786b66f8cc-7bp78"] Dec 05 12:43:55 crc kubenswrapper[4784]: I1205 12:43:55.054369 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6697f74bb9-lvd58"] Dec 05 12:43:55 crc kubenswrapper[4784]: I1205 12:43:55.059796 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6697f74bb9-lvd58"] Dec 05 12:43:55 crc kubenswrapper[4784]: I1205 12:43:55.096560 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-64ff4bc6cc-z9fht"] Dec 05 12:43:55 crc kubenswrapper[4784]: I1205 12:43:55.101966 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-64ff4bc6cc-z9fht"] Dec 05 12:43:55 crc kubenswrapper[4784]: I1205 12:43:55.437733 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-6s6n5"] Dec 05 12:43:55 crc kubenswrapper[4784]: W1205 12:43:55.443203 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8762021a_ee57_4a56_b752_da1d808ca0ff.slice/crio-726592b173738bfc01edfa717b4e6c199a6aae4c297d8356a619b92fe186a613 WatchSource:0}: Error finding container 726592b173738bfc01edfa717b4e6c199a6aae4c297d8356a619b92fe186a613: Status 404 returned error can't find the container with id 726592b173738bfc01edfa717b4e6c199a6aae4c297d8356a619b92fe186a613 Dec 05 12:43:55 crc kubenswrapper[4784]: I1205 12:43:55.978870 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-6s6n5" event={"ID":"8762021a-ee57-4a56-b752-da1d808ca0ff","Type":"ContainerStarted","Data":"726592b173738bfc01edfa717b4e6c199a6aae4c297d8356a619b92fe186a613"} Dec 05 12:43:57 crc kubenswrapper[4784]: I1205 12:43:57.018782 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f025e17-70a2-4972-88d2-f3bf817479d4" path="/var/lib/kubelet/pods/1f025e17-70a2-4972-88d2-f3bf817479d4/volumes" Dec 05 12:43:57 crc kubenswrapper[4784]: I1205 12:43:57.019287 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35d88457-3c06-4722-a27b-d343a6b97352" path="/var/lib/kubelet/pods/35d88457-3c06-4722-a27b-d343a6b97352/volumes" Dec 05 12:43:57 crc kubenswrapper[4784]: I1205 12:43:57.019724 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4d82c03a-55b1-4e18-b073-a75808dc0d9a" path="/var/lib/kubelet/pods/4d82c03a-55b1-4e18-b073-a75808dc0d9a/volumes" Dec 05 12:43:59 crc kubenswrapper[4784]: I1205 12:43:59.573009 4784 patch_prober.go:28] interesting 
pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:43:59 crc kubenswrapper[4784]: I1205 12:43:59.573511 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:44:03 crc kubenswrapper[4784]: I1205 12:44:03.059719 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ljc2l" event={"ID":"938888bc-6cef-410e-b517-9fdb0c824405","Type":"ContainerStarted","Data":"bb283b228446f2d6403764365fc95ddbc5779f91709b7910480ef4a47a6bc746"} Dec 05 12:44:03 crc kubenswrapper[4784]: I1205 12:44:03.060560 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ljc2l" Dec 05 12:44:03 crc kubenswrapper[4784]: I1205 12:44:03.061592 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"3af95a38-f2ad-44f7-a99d-77d48faa79f8","Type":"ContainerStarted","Data":"7ae61a7d4ba248c307c5fffea56d2bad8b7b15649aebe89bf12f78338fd56195"} Dec 05 12:44:03 crc kubenswrapper[4784]: I1205 12:44:03.062960 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"62eaeb31-76a0-4f2b-9bbe-b00f25a620e3","Type":"ContainerStarted","Data":"d915262e0e4be5d54bd56f45e88aabe1b1a96e9261ad96075ffd60be915e3d7a"} Dec 05 12:44:03 crc kubenswrapper[4784]: I1205 12:44:03.064435 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"2142f1ca-e4be-48fc-94b9-12d5f7737366","Type":"ContainerStarted","Data":"5dfecbb2965521c56a01c09a89133a8b6a33dbf84a327e373dd72d01251a5b40"} Dec 05 12:44:03 crc kubenswrapper[4784]: I1205 12:44:03.065909 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-6s6n5" event={"ID":"8762021a-ee57-4a56-b752-da1d808ca0ff","Type":"ContainerStarted","Data":"156cbfb8da88d5dd754216bdbaae54a71c4bc1cc9af456fd1e91e0680775e9b3"} Dec 05 12:44:03 crc kubenswrapper[4784]: I1205 12:44:03.067887 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"de4ea412-229e-4e53-97ff-86a923c47aac","Type":"ContainerStarted","Data":"4389311aa37e7757d1843efe7a3acde406192f299c169891a5ea59107b30c32c"} Dec 05 12:44:03 crc kubenswrapper[4784]: I1205 12:44:03.069546 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"28e48eb5-7362-46c6-8b58-d1d5f1a111c9","Type":"ContainerStarted","Data":"e94fdcb08f60d0d60f5e6fcfdcaaf6dc634b77cbd81eecc2ac2a73aef2145843"} Dec 05 12:44:03 crc kubenswrapper[4784]: I1205 12:44:03.069766 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 05 12:44:03 crc kubenswrapper[4784]: I1205 12:44:03.072336 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"a93486d9-05bf-44e5-9991-5ca89f117938","Type":"ContainerStarted","Data":"77a5034e1011c8d91312751ab6a675203604232b4b8a6c5af113e3d4e6419591"} Dec 05 12:44:03 crc kubenswrapper[4784]: I1205 12:44:03.072499 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openstack/memcached-0" Dec 05 12:44:03 crc kubenswrapper[4784]: I1205 12:44:03.079943 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ljc2l" podStartSLOduration=28.078283747 podStartE2EDuration="34.079925364s" podCreationTimestamp="2025-12-05 12:43:29 +0000 UTC" firstStartedPulling="2025-12-05 12:43:54.762332064 +0000 UTC m=+1114.182398879" lastFinishedPulling="2025-12-05 12:44:00.763973681 +0000 UTC m=+1120.184040496" observedRunningTime="2025-12-05 12:44:03.079724988 +0000 UTC m=+1122.499791813" watchObservedRunningTime="2025-12-05 12:44:03.079925364 +0000 UTC m=+1122.499992179" Dec 05 12:44:03 crc kubenswrapper[4784]: I1205 12:44:03.148461 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=30.462158497 podStartE2EDuration="38.148437336s" podCreationTimestamp="2025-12-05 12:43:25 +0000 UTC" firstStartedPulling="2025-12-05 12:43:54.512341321 +0000 UTC m=+1113.932408136" lastFinishedPulling="2025-12-05 12:44:02.19862016 +0000 UTC m=+1121.618686975" observedRunningTime="2025-12-05 12:44:03.145543165 +0000 UTC m=+1122.565610000" watchObservedRunningTime="2025-12-05 12:44:03.148437336 +0000 UTC m=+1122.568504171" Dec 05 12:44:03 crc kubenswrapper[4784]: I1205 12:44:03.183614 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=33.920397822 podStartE2EDuration="40.183595674s" podCreationTimestamp="2025-12-05 12:43:23 +0000 UTC" firstStartedPulling="2025-12-05 12:43:54.500724908 +0000 UTC m=+1113.920791713" lastFinishedPulling="2025-12-05 12:44:00.76392275 +0000 UTC m=+1120.183989565" observedRunningTime="2025-12-05 12:44:03.180085925 +0000 UTC m=+1122.600152740" watchObservedRunningTime="2025-12-05 12:44:03.183595674 +0000 UTC m=+1122.603662489" Dec 05 12:44:04 crc kubenswrapper[4784]: I1205 12:44:04.080954 4784 generic.go:334] "Generic (PLEG): container finished" podID="8762021a-ee57-4a56-b752-da1d808ca0ff" containerID="156cbfb8da88d5dd754216bdbaae54a71c4bc1cc9af456fd1e91e0680775e9b3" exitCode=0 Dec 05 12:44:04 crc kubenswrapper[4784]: I1205 12:44:04.082156 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-6s6n5" event={"ID":"8762021a-ee57-4a56-b752-da1d808ca0ff","Type":"ContainerDied","Data":"156cbfb8da88d5dd754216bdbaae54a71c4bc1cc9af456fd1e91e0680775e9b3"} Dec 05 12:44:05 crc kubenswrapper[4784]: I1205 12:44:05.131960 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"80b02221-f73a-4535-b2d9-c203e5de2061","Type":"ContainerStarted","Data":"f48a9ccf406483f0a446cb52c5e2946429b81856ad61cf5b3146d8b0adda3e99"} Dec 05 12:44:05 crc kubenswrapper[4784]: I1205 12:44:05.135396 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-6s6n5" event={"ID":"8762021a-ee57-4a56-b752-da1d808ca0ff","Type":"ContainerStarted","Data":"fb19dfbea06ebf8fee16520f3319da39218026b927faa3c57c625cf7b2a7bcad"} Dec 05 12:44:07 crc kubenswrapper[4784]: I1205 12:44:07.157665 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"de4ea412-229e-4e53-97ff-86a923c47aac","Type":"ContainerStarted","Data":"84a985c0da27a5ec4de44e6e31908071698c372940576a650dbb53892e027156"} Dec 05 12:44:07 crc kubenswrapper[4784]: I1205 12:44:07.191442 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" 
podStartSLOduration=23.402178682 podStartE2EDuration="35.191420707s" podCreationTimestamp="2025-12-05 12:43:32 +0000 UTC" firstStartedPulling="2025-12-05 12:43:54.707682836 +0000 UTC m=+1114.127749651" lastFinishedPulling="2025-12-05 12:44:06.496924861 +0000 UTC m=+1125.916991676" observedRunningTime="2025-12-05 12:44:07.186148503 +0000 UTC m=+1126.606215348" watchObservedRunningTime="2025-12-05 12:44:07.191420707 +0000 UTC m=+1126.611487532" Dec 05 12:44:08 crc kubenswrapper[4784]: I1205 12:44:08.170739 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"3af95a38-f2ad-44f7-a99d-77d48faa79f8","Type":"ContainerStarted","Data":"051ebb814f63e1bf39c2ffbc0687993e44c900953872d119910d965dd9da7d20"} Dec 05 12:44:08 crc kubenswrapper[4784]: I1205 12:44:08.173902 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-6s6n5" event={"ID":"8762021a-ee57-4a56-b752-da1d808ca0ff","Type":"ContainerStarted","Data":"8a5aeddd38a9142608c61bbd90fcd39b175bda05f7a60b69ead53cebfebd0425"} Dec 05 12:44:08 crc kubenswrapper[4784]: I1205 12:44:08.174139 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-6s6n5" Dec 05 12:44:08 crc kubenswrapper[4784]: I1205 12:44:08.177724 4784 generic.go:334] "Generic (PLEG): container finished" podID="6bf4da93-86e3-498e-8fe9-e97688dc2479" containerID="95ee65bea06e4234464eb03a027bcda18e5be9d73fdc9c8ee2238789a14c9926" exitCode=0 Dec 05 12:44:08 crc kubenswrapper[4784]: I1205 12:44:08.178673 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86d858c69c-66jcn" event={"ID":"6bf4da93-86e3-498e-8fe9-e97688dc2479","Type":"ContainerDied","Data":"95ee65bea06e4234464eb03a027bcda18e5be9d73fdc9c8ee2238789a14c9926"} Dec 05 12:44:08 crc kubenswrapper[4784]: I1205 12:44:08.203535 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=28.51877656 podStartE2EDuration="40.203515909s" podCreationTimestamp="2025-12-05 12:43:28 +0000 UTC" firstStartedPulling="2025-12-05 12:43:54.825255861 +0000 UTC m=+1114.245322676" lastFinishedPulling="2025-12-05 12:44:06.50999518 +0000 UTC m=+1125.930062025" observedRunningTime="2025-12-05 12:44:08.202132266 +0000 UTC m=+1127.622199091" watchObservedRunningTime="2025-12-05 12:44:08.203515909 +0000 UTC m=+1127.623582724" Dec 05 12:44:08 crc kubenswrapper[4784]: I1205 12:44:08.239888 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-6s6n5" podStartSLOduration=34.092234009 podStartE2EDuration="39.239873715s" podCreationTimestamp="2025-12-05 12:43:29 +0000 UTC" firstStartedPulling="2025-12-05 12:43:55.445786505 +0000 UTC m=+1114.865853320" lastFinishedPulling="2025-12-05 12:44:00.593426211 +0000 UTC m=+1120.013493026" observedRunningTime="2025-12-05 12:44:08.233721422 +0000 UTC m=+1127.653788237" watchObservedRunningTime="2025-12-05 12:44:08.239873715 +0000 UTC m=+1127.659940530" Dec 05 12:44:08 crc kubenswrapper[4784]: I1205 12:44:08.922236 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Dec 05 12:44:09 crc kubenswrapper[4784]: I1205 12:44:09.131698 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Dec 05 12:44:09 crc kubenswrapper[4784]: I1205 12:44:09.195684 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86d858c69c-66jcn" 
event={"ID":"6bf4da93-86e3-498e-8fe9-e97688dc2479","Type":"ContainerStarted","Data":"6faa841d2513bce1ed947d1c08cad77255c967d2d8d1eceead2a6079b7a8c5b7"} Dec 05 12:44:09 crc kubenswrapper[4784]: I1205 12:44:09.196289 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-86d858c69c-66jcn" Dec 05 12:44:09 crc kubenswrapper[4784]: I1205 12:44:09.199169 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-notifications-server-0" event={"ID":"0a051f14-c8d2-4d57-95a9-9be7c46f9031","Type":"ContainerStarted","Data":"e949137b0e1c62a396337d8c7c4ca9ffa0bdff069c634228e646ac06f66e0447"} Dec 05 12:44:09 crc kubenswrapper[4784]: I1205 12:44:09.204007 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b","Type":"ContainerStarted","Data":"22363b649441c80e6ba0100af0b361bd0e296e9bee6f6dfe1d164d8d100e3153"} Dec 05 12:44:09 crc kubenswrapper[4784]: I1205 12:44:09.204080 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-6s6n5" Dec 05 12:44:09 crc kubenswrapper[4784]: I1205 12:44:09.227893 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-86d858c69c-66jcn" podStartSLOduration=4.104812076 podStartE2EDuration="51.227869854s" podCreationTimestamp="2025-12-05 12:43:18 +0000 UTC" firstStartedPulling="2025-12-05 12:43:19.355807748 +0000 UTC m=+1078.775874563" lastFinishedPulling="2025-12-05 12:44:06.478865526 +0000 UTC m=+1125.898932341" observedRunningTime="2025-12-05 12:44:09.215326602 +0000 UTC m=+1128.635393447" watchObservedRunningTime="2025-12-05 12:44:09.227869854 +0000 UTC m=+1128.647936679" Dec 05 12:44:09 crc kubenswrapper[4784]: I1205 12:44:09.238471 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Dec 05 12:44:09 crc kubenswrapper[4784]: I1205 12:44:09.297484 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Dec 05 12:44:09 crc kubenswrapper[4784]: I1205 12:44:09.922841 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Dec 05 12:44:09 crc kubenswrapper[4784]: I1205 12:44:09.967577 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.212163 4784 generic.go:334] "Generic (PLEG): container finished" podID="62eaeb31-76a0-4f2b-9bbe-b00f25a620e3" containerID="d915262e0e4be5d54bd56f45e88aabe1b1a96e9261ad96075ffd60be915e3d7a" exitCode=0 Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.212260 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"62eaeb31-76a0-4f2b-9bbe-b00f25a620e3","Type":"ContainerDied","Data":"d915262e0e4be5d54bd56f45e88aabe1b1a96e9261ad96075ffd60be915e3d7a"} Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.214579 4784 generic.go:334] "Generic (PLEG): container finished" podID="2142f1ca-e4be-48fc-94b9-12d5f7737366" containerID="5dfecbb2965521c56a01c09a89133a8b6a33dbf84a327e373dd72d01251a5b40" exitCode=0 Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.214630 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" 
event={"ID":"2142f1ca-e4be-48fc-94b9-12d5f7737366","Type":"ContainerDied","Data":"5dfecbb2965521c56a01c09a89133a8b6a33dbf84a327e373dd72d01251a5b40"} Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.216127 4784 generic.go:334] "Generic (PLEG): container finished" podID="c9e507fa-3712-4c8d-92ae-1b66449ec42c" containerID="3b543bf7bdef0d955c5270f4a61a30853c827ab21670a240222b3364d401d80a" exitCode=0 Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.216243 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c44d66bd9-9tmnb" event={"ID":"c9e507fa-3712-4c8d-92ae-1b66449ec42c","Type":"ContainerDied","Data":"3b543bf7bdef0d955c5270f4a61a30853c827ab21670a240222b3364d401d80a"} Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.220026 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e9889e9e-8ec4-44aa-a829-327920ab827f","Type":"ContainerStarted","Data":"ae813c619b4059c1fac1dd894044aaadfb3d9881e07c852a6891523597c9bbce"} Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.221219 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.286343 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.295606 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.496993 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7c44d66bd9-9tmnb"] Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.521866 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-ccfb859df-n5vsw"] Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.524038 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-ccfb859df-n5vsw" Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.528571 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.539130 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-ccfb859df-n5vsw"] Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.589084 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-sjfhv"] Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.590097 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-sjfhv" Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.595352 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.617542 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8c6c365b-59eb-4668-9f15-42dcc18f87bd-ovsdbserver-nb\") pod \"dnsmasq-dns-ccfb859df-n5vsw\" (UID: \"8c6c365b-59eb-4668-9f15-42dcc18f87bd\") " pod="openstack/dnsmasq-dns-ccfb859df-n5vsw" Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.617603 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2vt77\" (UniqueName: \"kubernetes.io/projected/8c6c365b-59eb-4668-9f15-42dcc18f87bd-kube-api-access-2vt77\") pod \"dnsmasq-dns-ccfb859df-n5vsw\" (UID: \"8c6c365b-59eb-4668-9f15-42dcc18f87bd\") " pod="openstack/dnsmasq-dns-ccfb859df-n5vsw" Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.617627 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8c6c365b-59eb-4668-9f15-42dcc18f87bd-dns-svc\") pod \"dnsmasq-dns-ccfb859df-n5vsw\" (UID: \"8c6c365b-59eb-4668-9f15-42dcc18f87bd\") " pod="openstack/dnsmasq-dns-ccfb859df-n5vsw" Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.617991 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c6c365b-59eb-4668-9f15-42dcc18f87bd-config\") pod \"dnsmasq-dns-ccfb859df-n5vsw\" (UID: \"8c6c365b-59eb-4668-9f15-42dcc18f87bd\") " pod="openstack/dnsmasq-dns-ccfb859df-n5vsw" Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.619385 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-sjfhv"] Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.719219 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n6wnz\" (UniqueName: \"kubernetes.io/projected/646c01bd-0f76-4fbc-aae4-9d679cde5796-kube-api-access-n6wnz\") pod \"ovn-controller-metrics-sjfhv\" (UID: \"646c01bd-0f76-4fbc-aae4-9d679cde5796\") " pod="openstack/ovn-controller-metrics-sjfhv" Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.719281 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c6c365b-59eb-4668-9f15-42dcc18f87bd-config\") pod \"dnsmasq-dns-ccfb859df-n5vsw\" (UID: \"8c6c365b-59eb-4668-9f15-42dcc18f87bd\") " pod="openstack/dnsmasq-dns-ccfb859df-n5vsw" Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.719312 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/646c01bd-0f76-4fbc-aae4-9d679cde5796-combined-ca-bundle\") pod \"ovn-controller-metrics-sjfhv\" (UID: \"646c01bd-0f76-4fbc-aae4-9d679cde5796\") " pod="openstack/ovn-controller-metrics-sjfhv" Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.719498 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/646c01bd-0f76-4fbc-aae4-9d679cde5796-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-sjfhv\" 
(UID: \"646c01bd-0f76-4fbc-aae4-9d679cde5796\") " pod="openstack/ovn-controller-metrics-sjfhv" Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.719583 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8c6c365b-59eb-4668-9f15-42dcc18f87bd-ovsdbserver-nb\") pod \"dnsmasq-dns-ccfb859df-n5vsw\" (UID: \"8c6c365b-59eb-4668-9f15-42dcc18f87bd\") " pod="openstack/dnsmasq-dns-ccfb859df-n5vsw" Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.719647 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2vt77\" (UniqueName: \"kubernetes.io/projected/8c6c365b-59eb-4668-9f15-42dcc18f87bd-kube-api-access-2vt77\") pod \"dnsmasq-dns-ccfb859df-n5vsw\" (UID: \"8c6c365b-59eb-4668-9f15-42dcc18f87bd\") " pod="openstack/dnsmasq-dns-ccfb859df-n5vsw" Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.719709 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8c6c365b-59eb-4668-9f15-42dcc18f87bd-dns-svc\") pod \"dnsmasq-dns-ccfb859df-n5vsw\" (UID: \"8c6c365b-59eb-4668-9f15-42dcc18f87bd\") " pod="openstack/dnsmasq-dns-ccfb859df-n5vsw" Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.719730 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/646c01bd-0f76-4fbc-aae4-9d679cde5796-ovs-rundir\") pod \"ovn-controller-metrics-sjfhv\" (UID: \"646c01bd-0f76-4fbc-aae4-9d679cde5796\") " pod="openstack/ovn-controller-metrics-sjfhv" Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.720077 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c6c365b-59eb-4668-9f15-42dcc18f87bd-config\") pod \"dnsmasq-dns-ccfb859df-n5vsw\" (UID: \"8c6c365b-59eb-4668-9f15-42dcc18f87bd\") " pod="openstack/dnsmasq-dns-ccfb859df-n5vsw" Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.720333 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8c6c365b-59eb-4668-9f15-42dcc18f87bd-ovsdbserver-nb\") pod \"dnsmasq-dns-ccfb859df-n5vsw\" (UID: \"8c6c365b-59eb-4668-9f15-42dcc18f87bd\") " pod="openstack/dnsmasq-dns-ccfb859df-n5vsw" Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.720480 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8c6c365b-59eb-4668-9f15-42dcc18f87bd-dns-svc\") pod \"dnsmasq-dns-ccfb859df-n5vsw\" (UID: \"8c6c365b-59eb-4668-9f15-42dcc18f87bd\") " pod="openstack/dnsmasq-dns-ccfb859df-n5vsw" Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.720636 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/646c01bd-0f76-4fbc-aae4-9d679cde5796-ovn-rundir\") pod \"ovn-controller-metrics-sjfhv\" (UID: \"646c01bd-0f76-4fbc-aae4-9d679cde5796\") " pod="openstack/ovn-controller-metrics-sjfhv" Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.720799 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/646c01bd-0f76-4fbc-aae4-9d679cde5796-config\") pod \"ovn-controller-metrics-sjfhv\" (UID: \"646c01bd-0f76-4fbc-aae4-9d679cde5796\") " pod="openstack/ovn-controller-metrics-sjfhv" 
Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.739851 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2vt77\" (UniqueName: \"kubernetes.io/projected/8c6c365b-59eb-4668-9f15-42dcc18f87bd-kube-api-access-2vt77\") pod \"dnsmasq-dns-ccfb859df-n5vsw\" (UID: \"8c6c365b-59eb-4668-9f15-42dcc18f87bd\") " pod="openstack/dnsmasq-dns-ccfb859df-n5vsw"
Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.776564 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86d858c69c-66jcn"]
Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.783055 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"]
Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.786960 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.788787 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts"
Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.789073 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-bvmrv"
Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.790303 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs"
Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.791706 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config"
Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.822238 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/646c01bd-0f76-4fbc-aae4-9d679cde5796-combined-ca-bundle\") pod \"ovn-controller-metrics-sjfhv\" (UID: \"646c01bd-0f76-4fbc-aae4-9d679cde5796\") " pod="openstack/ovn-controller-metrics-sjfhv"
Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.822319 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/646c01bd-0f76-4fbc-aae4-9d679cde5796-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-sjfhv\" (UID: \"646c01bd-0f76-4fbc-aae4-9d679cde5796\") " pod="openstack/ovn-controller-metrics-sjfhv"
Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.822358 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/646c01bd-0f76-4fbc-aae4-9d679cde5796-ovs-rundir\") pod \"ovn-controller-metrics-sjfhv\" (UID: \"646c01bd-0f76-4fbc-aae4-9d679cde5796\") " pod="openstack/ovn-controller-metrics-sjfhv"
Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.822390 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/646c01bd-0f76-4fbc-aae4-9d679cde5796-ovn-rundir\") pod \"ovn-controller-metrics-sjfhv\" (UID: \"646c01bd-0f76-4fbc-aae4-9d679cde5796\") " pod="openstack/ovn-controller-metrics-sjfhv"
Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.822433 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/646c01bd-0f76-4fbc-aae4-9d679cde5796-config\") pod \"ovn-controller-metrics-sjfhv\" (UID: \"646c01bd-0f76-4fbc-aae4-9d679cde5796\") " pod="openstack/ovn-controller-metrics-sjfhv"
Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.822479 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n6wnz\" (UniqueName: \"kubernetes.io/projected/646c01bd-0f76-4fbc-aae4-9d679cde5796-kube-api-access-n6wnz\") pod \"ovn-controller-metrics-sjfhv\" (UID: \"646c01bd-0f76-4fbc-aae4-9d679cde5796\") " pod="openstack/ovn-controller-metrics-sjfhv"
Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.823493 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/646c01bd-0f76-4fbc-aae4-9d679cde5796-ovs-rundir\") pod \"ovn-controller-metrics-sjfhv\" (UID: \"646c01bd-0f76-4fbc-aae4-9d679cde5796\") " pod="openstack/ovn-controller-metrics-sjfhv"
Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.827861 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/646c01bd-0f76-4fbc-aae4-9d679cde5796-combined-ca-bundle\") pod \"ovn-controller-metrics-sjfhv\" (UID: \"646c01bd-0f76-4fbc-aae4-9d679cde5796\") " pod="openstack/ovn-controller-metrics-sjfhv"
Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.827960 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/646c01bd-0f76-4fbc-aae4-9d679cde5796-ovn-rundir\") pod \"ovn-controller-metrics-sjfhv\" (UID: \"646c01bd-0f76-4fbc-aae4-9d679cde5796\") " pod="openstack/ovn-controller-metrics-sjfhv"
Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.828141 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/646c01bd-0f76-4fbc-aae4-9d679cde5796-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-sjfhv\" (UID: \"646c01bd-0f76-4fbc-aae4-9d679cde5796\") " pod="openstack/ovn-controller-metrics-sjfhv"
Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.828407 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/646c01bd-0f76-4fbc-aae4-9d679cde5796-config\") pod \"ovn-controller-metrics-sjfhv\" (UID: \"646c01bd-0f76-4fbc-aae4-9d679cde5796\") " pod="openstack/ovn-controller-metrics-sjfhv"
Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.828410 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-55957bfdc7-v26wk"]
Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.830022 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55957bfdc7-v26wk"
Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.842664 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.851076 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb"
Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.854644 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-ccfb859df-n5vsw"
Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.886288 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55957bfdc7-v26wk"]
Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.889821 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n6wnz\" (UniqueName: \"kubernetes.io/projected/646c01bd-0f76-4fbc-aae4-9d679cde5796-kube-api-access-n6wnz\") pod \"ovn-controller-metrics-sjfhv\" (UID: \"646c01bd-0f76-4fbc-aae4-9d679cde5796\") " pod="openstack/ovn-controller-metrics-sjfhv"
Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.914145 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-sjfhv"
Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.931965 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/961dcf80-20f3-48f5-818b-2c497ce58e01-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"961dcf80-20f3-48f5-818b-2c497ce58e01\") " pod="openstack/ovn-northd-0"
Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.932008 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8800798b-a78e-432a-a000-c8ad9645c67c-ovsdbserver-nb\") pod \"dnsmasq-dns-55957bfdc7-v26wk\" (UID: \"8800798b-a78e-432a-a000-c8ad9645c67c\") " pod="openstack/dnsmasq-dns-55957bfdc7-v26wk"
Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.932030 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4jkg7\" (UniqueName: \"kubernetes.io/projected/961dcf80-20f3-48f5-818b-2c497ce58e01-kube-api-access-4jkg7\") pod \"ovn-northd-0\" (UID: \"961dcf80-20f3-48f5-818b-2c497ce58e01\") " pod="openstack/ovn-northd-0"
Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.932060 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/961dcf80-20f3-48f5-818b-2c497ce58e01-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"961dcf80-20f3-48f5-818b-2c497ce58e01\") " pod="openstack/ovn-northd-0"
Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.932104 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/961dcf80-20f3-48f5-818b-2c497ce58e01-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"961dcf80-20f3-48f5-818b-2c497ce58e01\") " pod="openstack/ovn-northd-0"
Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.932120 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8800798b-a78e-432a-a000-c8ad9645c67c-ovsdbserver-sb\") pod \"dnsmasq-dns-55957bfdc7-v26wk\" (UID: \"8800798b-a78e-432a-a000-c8ad9645c67c\") " pod="openstack/dnsmasq-dns-55957bfdc7-v26wk"
Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.932140 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/961dcf80-20f3-48f5-818b-2c497ce58e01-config\") pod \"ovn-northd-0\" (UID: \"961dcf80-20f3-48f5-818b-2c497ce58e01\") " pod="openstack/ovn-northd-0"
Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.932167 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-54cbx\" (UniqueName: \"kubernetes.io/projected/8800798b-a78e-432a-a000-c8ad9645c67c-kube-api-access-54cbx\") pod \"dnsmasq-dns-55957bfdc7-v26wk\" (UID: \"8800798b-a78e-432a-a000-c8ad9645c67c\") " pod="openstack/dnsmasq-dns-55957bfdc7-v26wk"
Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.932204 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8800798b-a78e-432a-a000-c8ad9645c67c-config\") pod \"dnsmasq-dns-55957bfdc7-v26wk\" (UID: \"8800798b-a78e-432a-a000-c8ad9645c67c\") " pod="openstack/dnsmasq-dns-55957bfdc7-v26wk"
Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.932223 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8800798b-a78e-432a-a000-c8ad9645c67c-dns-svc\") pod \"dnsmasq-dns-55957bfdc7-v26wk\" (UID: \"8800798b-a78e-432a-a000-c8ad9645c67c\") " pod="openstack/dnsmasq-dns-55957bfdc7-v26wk"
Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.932260 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/961dcf80-20f3-48f5-818b-2c497ce58e01-scripts\") pod \"ovn-northd-0\" (UID: \"961dcf80-20f3-48f5-818b-2c497ce58e01\") " pod="openstack/ovn-northd-0"
Dec 05 12:44:10 crc kubenswrapper[4784]: I1205 12:44:10.932279 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/961dcf80-20f3-48f5-818b-2c497ce58e01-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"961dcf80-20f3-48f5-818b-2c497ce58e01\") " pod="openstack/ovn-northd-0"
Dec 05 12:44:11 crc kubenswrapper[4784]: I1205 12:44:11.033991 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/961dcf80-20f3-48f5-818b-2c497ce58e01-scripts\") pod \"ovn-northd-0\" (UID: \"961dcf80-20f3-48f5-818b-2c497ce58e01\") " pod="openstack/ovn-northd-0"
Dec 05 12:44:11 crc kubenswrapper[4784]: I1205 12:44:11.034043 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/961dcf80-20f3-48f5-818b-2c497ce58e01-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"961dcf80-20f3-48f5-818b-2c497ce58e01\") " pod="openstack/ovn-northd-0"
Dec 05 12:44:11 crc kubenswrapper[4784]: I1205 12:44:11.034663 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/961dcf80-20f3-48f5-818b-2c497ce58e01-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"961dcf80-20f3-48f5-818b-2c497ce58e01\") " pod="openstack/ovn-northd-0"
Dec 05 12:44:11 crc kubenswrapper[4784]: I1205 12:44:11.034695 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8800798b-a78e-432a-a000-c8ad9645c67c-ovsdbserver-nb\") pod \"dnsmasq-dns-55957bfdc7-v26wk\" (UID: \"8800798b-a78e-432a-a000-c8ad9645c67c\") " pod="openstack/dnsmasq-dns-55957bfdc7-v26wk"
Dec 05 12:44:11 crc kubenswrapper[4784]: I1205 12:44:11.034765 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4jkg7\" (UniqueName: \"kubernetes.io/projected/961dcf80-20f3-48f5-818b-2c497ce58e01-kube-api-access-4jkg7\") pod \"ovn-northd-0\" (UID: \"961dcf80-20f3-48f5-818b-2c497ce58e01\") " pod="openstack/ovn-northd-0"
Dec 05 12:44:11 crc kubenswrapper[4784]: I1205 12:44:11.034920 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/961dcf80-20f3-48f5-818b-2c497ce58e01-scripts\") pod \"ovn-northd-0\" (UID: \"961dcf80-20f3-48f5-818b-2c497ce58e01\") " pod="openstack/ovn-northd-0"
Dec 05 12:44:11 crc kubenswrapper[4784]: I1205 12:44:11.036622 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8800798b-a78e-432a-a000-c8ad9645c67c-ovsdbserver-nb\") pod \"dnsmasq-dns-55957bfdc7-v26wk\" (UID: \"8800798b-a78e-432a-a000-c8ad9645c67c\") " pod="openstack/dnsmasq-dns-55957bfdc7-v26wk"
Dec 05 12:44:11 crc kubenswrapper[4784]: I1205 12:44:11.036755 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/961dcf80-20f3-48f5-818b-2c497ce58e01-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"961dcf80-20f3-48f5-818b-2c497ce58e01\") " pod="openstack/ovn-northd-0"
Dec 05 12:44:11 crc kubenswrapper[4784]: I1205 12:44:11.036827 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/961dcf80-20f3-48f5-818b-2c497ce58e01-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"961dcf80-20f3-48f5-818b-2c497ce58e01\") " pod="openstack/ovn-northd-0"
Dec 05 12:44:11 crc kubenswrapper[4784]: I1205 12:44:11.036845 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8800798b-a78e-432a-a000-c8ad9645c67c-ovsdbserver-sb\") pod \"dnsmasq-dns-55957bfdc7-v26wk\" (UID: \"8800798b-a78e-432a-a000-c8ad9645c67c\") " pod="openstack/dnsmasq-dns-55957bfdc7-v26wk"
Dec 05 12:44:11 crc kubenswrapper[4784]: I1205 12:44:11.036868 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/961dcf80-20f3-48f5-818b-2c497ce58e01-config\") pod \"ovn-northd-0\" (UID: \"961dcf80-20f3-48f5-818b-2c497ce58e01\") " pod="openstack/ovn-northd-0"
Dec 05 12:44:11 crc kubenswrapper[4784]: I1205 12:44:11.037553 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8800798b-a78e-432a-a000-c8ad9645c67c-ovsdbserver-sb\") pod \"dnsmasq-dns-55957bfdc7-v26wk\" (UID: \"8800798b-a78e-432a-a000-c8ad9645c67c\") " pod="openstack/dnsmasq-dns-55957bfdc7-v26wk"
Dec 05 12:44:11 crc kubenswrapper[4784]: I1205 12:44:11.038564 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/961dcf80-20f3-48f5-818b-2c497ce58e01-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"961dcf80-20f3-48f5-818b-2c497ce58e01\") " pod="openstack/ovn-northd-0"
Dec 05 12:44:11 crc kubenswrapper[4784]: I1205 12:44:11.039796 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-54cbx\" (UniqueName: \"kubernetes.io/projected/8800798b-a78e-432a-a000-c8ad9645c67c-kube-api-access-54cbx\") pod \"dnsmasq-dns-55957bfdc7-v26wk\" (UID: \"8800798b-a78e-432a-a000-c8ad9645c67c\") " pod="openstack/dnsmasq-dns-55957bfdc7-v26wk"
Dec 05 12:44:11 crc kubenswrapper[4784]: I1205 12:44:11.039905 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8800798b-a78e-432a-a000-c8ad9645c67c-config\") pod \"dnsmasq-dns-55957bfdc7-v26wk\" (UID: \"8800798b-a78e-432a-a000-c8ad9645c67c\") " pod="openstack/dnsmasq-dns-55957bfdc7-v26wk"
Dec 05 12:44:11 crc kubenswrapper[4784]: I1205 12:44:11.040739 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8800798b-a78e-432a-a000-c8ad9645c67c-config\") pod \"dnsmasq-dns-55957bfdc7-v26wk\" (UID: \"8800798b-a78e-432a-a000-c8ad9645c67c\") " pod="openstack/dnsmasq-dns-55957bfdc7-v26wk"
Dec 05 12:44:11 crc kubenswrapper[4784]: I1205 12:44:11.041075 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8800798b-a78e-432a-a000-c8ad9645c67c-dns-svc\") pod \"dnsmasq-dns-55957bfdc7-v26wk\" (UID: \"8800798b-a78e-432a-a000-c8ad9645c67c\") " pod="openstack/dnsmasq-dns-55957bfdc7-v26wk"
Dec 05 12:44:11 crc kubenswrapper[4784]: I1205 12:44:11.041405 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/961dcf80-20f3-48f5-818b-2c497ce58e01-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"961dcf80-20f3-48f5-818b-2c497ce58e01\") " pod="openstack/ovn-northd-0"
Dec 05 12:44:11 crc kubenswrapper[4784]: I1205 12:44:11.042844 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/961dcf80-20f3-48f5-818b-2c497ce58e01-config\") pod \"ovn-northd-0\" (UID: \"961dcf80-20f3-48f5-818b-2c497ce58e01\") " pod="openstack/ovn-northd-0"
Dec 05 12:44:11 crc kubenswrapper[4784]: I1205 12:44:11.042856 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8800798b-a78e-432a-a000-c8ad9645c67c-dns-svc\") pod \"dnsmasq-dns-55957bfdc7-v26wk\" (UID: \"8800798b-a78e-432a-a000-c8ad9645c67c\") " pod="openstack/dnsmasq-dns-55957bfdc7-v26wk"
Dec 05 12:44:11 crc kubenswrapper[4784]: I1205 12:44:11.043413 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/961dcf80-20f3-48f5-818b-2c497ce58e01-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"961dcf80-20f3-48f5-818b-2c497ce58e01\") " pod="openstack/ovn-northd-0"
Dec 05 12:44:11 crc kubenswrapper[4784]: I1205 12:44:11.044918 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/961dcf80-20f3-48f5-818b-2c497ce58e01-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"961dcf80-20f3-48f5-818b-2c497ce58e01\") " pod="openstack/ovn-northd-0"
Dec 05 12:44:11 crc kubenswrapper[4784]: I1205 12:44:11.053799 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-54cbx\" (UniqueName: \"kubernetes.io/projected/8800798b-a78e-432a-a000-c8ad9645c67c-kube-api-access-54cbx\") pod \"dnsmasq-dns-55957bfdc7-v26wk\" (UID: \"8800798b-a78e-432a-a000-c8ad9645c67c\") " pod="openstack/dnsmasq-dns-55957bfdc7-v26wk"
Dec 05 12:44:11 crc kubenswrapper[4784]: I1205 12:44:11.057098 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4jkg7\" (UniqueName: \"kubernetes.io/projected/961dcf80-20f3-48f5-818b-2c497ce58e01-kube-api-access-4jkg7\") pod \"ovn-northd-0\" (UID: \"961dcf80-20f3-48f5-818b-2c497ce58e01\") " pod="openstack/ovn-northd-0"
Dec 05 12:44:11 crc kubenswrapper[4784]: I1205 12:44:11.103769 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Dec 05 12:44:11 crc kubenswrapper[4784]: I1205 12:44:11.148620 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55957bfdc7-v26wk"
Dec 05 12:44:11 crc kubenswrapper[4784]: I1205 12:44:11.171285 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-ccfb859df-n5vsw"]
Dec 05 12:44:11 crc kubenswrapper[4784]: I1205 12:44:11.234655 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-ccfb859df-n5vsw" event={"ID":"8c6c365b-59eb-4668-9f15-42dcc18f87bd","Type":"ContainerStarted","Data":"5b1fc0c7e66968a6d3e2edfe86efdcec1d4a57eb74ac874820731292deef4eaa"}
Dec 05 12:44:11 crc kubenswrapper[4784]: I1205 12:44:11.237821 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"2142f1ca-e4be-48fc-94b9-12d5f7737366","Type":"ContainerStarted","Data":"605dbdcacface1a480fa8f6909bd8e235951d6b85a6bbb557a0c7ca7f8e3e3a5"}
Dec 05 12:44:11 crc kubenswrapper[4784]: I1205 12:44:11.240603 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-86d858c69c-66jcn" podUID="6bf4da93-86e3-498e-8fe9-e97688dc2479" containerName="dnsmasq-dns" containerID="cri-o://6faa841d2513bce1ed947d1c08cad77255c967d2d8d1eceead2a6079b7a8c5b7" gracePeriod=10
Dec 05 12:44:11 crc kubenswrapper[4784]: I1205 12:44:11.240688 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c44d66bd9-9tmnb" event={"ID":"c9e507fa-3712-4c8d-92ae-1b66449ec42c","Type":"ContainerStarted","Data":"034323f483c72ee9eea93d4812e263e95a25aa29b3eceb805072b35eb15513a5"}
Dec 05 12:44:11 crc kubenswrapper[4784]: I1205 12:44:11.443254 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-sjfhv"]
Dec 05 12:44:11 crc kubenswrapper[4784]: I1205 12:44:11.580316 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55957bfdc7-v26wk"]
Dec 05 12:44:11 crc kubenswrapper[4784]: I1205 12:44:11.589194 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Dec 05 12:44:12 crc kubenswrapper[4784]: I1205 12:44:12.283426 4784 generic.go:334] "Generic (PLEG): container finished" podID="8800798b-a78e-432a-a000-c8ad9645c67c" containerID="0ea7a549f06e209d37b14d8e89ee0ab1b2bb08a11a4a2b2111f8ba5fd79ccaf9" exitCode=0
Dec 05 12:44:12 crc kubenswrapper[4784]: I1205 12:44:12.283516 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55957bfdc7-v26wk" event={"ID":"8800798b-a78e-432a-a000-c8ad9645c67c","Type":"ContainerDied","Data":"0ea7a549f06e209d37b14d8e89ee0ab1b2bb08a11a4a2b2111f8ba5fd79ccaf9"}
Dec 05 12:44:12 crc kubenswrapper[4784]: I1205 12:44:12.283888 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55957bfdc7-v26wk" event={"ID":"8800798b-a78e-432a-a000-c8ad9645c67c","Type":"ContainerStarted","Data":"bee4a2f3e94904539caa93d4dfe654e1a75a6278cf8b6ce33850c86cd5938051"}
Dec 05 12:44:12 crc kubenswrapper[4784]: I1205 12:44:12.290497 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"961dcf80-20f3-48f5-818b-2c497ce58e01","Type":"ContainerStarted","Data":"97247b5d28f031adf3fac4bd6b6d8cee44802622b64bfe5dc507c521d717927e"}
Dec 05 12:44:12 crc kubenswrapper[4784]: I1205 12:44:12.296827 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-sjfhv" event={"ID":"646c01bd-0f76-4fbc-aae4-9d679cde5796","Type":"ContainerStarted","Data":"ed45bcaddfb5edf217f87ff7fd7fe111c0cea3e72ad0bc2e2e91a120716b7cc4"}
Dec 05 12:44:12 crc kubenswrapper[4784]: I1205 12:44:12.296896 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-sjfhv" event={"ID":"646c01bd-0f76-4fbc-aae4-9d679cde5796","Type":"ContainerStarted","Data":"594bf0118719784169b52619ac63e54737aaada5c340cdf376fbb211e9e217d9"}
Dec 05 12:44:12 crc kubenswrapper[4784]: I1205 12:44:12.303737 4784 generic.go:334] "Generic (PLEG): container finished" podID="6bf4da93-86e3-498e-8fe9-e97688dc2479" containerID="6faa841d2513bce1ed947d1c08cad77255c967d2d8d1eceead2a6079b7a8c5b7" exitCode=0
Dec 05 12:44:12 crc kubenswrapper[4784]: I1205 12:44:12.303820 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86d858c69c-66jcn" event={"ID":"6bf4da93-86e3-498e-8fe9-e97688dc2479","Type":"ContainerDied","Data":"6faa841d2513bce1ed947d1c08cad77255c967d2d8d1eceead2a6079b7a8c5b7"}
Dec 05 12:44:12 crc kubenswrapper[4784]: I1205 12:44:12.315257 4784 generic.go:334] "Generic (PLEG): container finished" podID="8c6c365b-59eb-4668-9f15-42dcc18f87bd" containerID="6c6476d6148bbf208b1f8335c3d8cb99674e2dba9c2a010d70f166e428641aae" exitCode=0
Dec 05 12:44:12 crc kubenswrapper[4784]: I1205 12:44:12.315369 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-ccfb859df-n5vsw" event={"ID":"8c6c365b-59eb-4668-9f15-42dcc18f87bd","Type":"ContainerDied","Data":"6c6476d6148bbf208b1f8335c3d8cb99674e2dba9c2a010d70f166e428641aae"}
Dec 05 12:44:12 crc kubenswrapper[4784]: I1205 12:44:12.318148 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"62eaeb31-76a0-4f2b-9bbe-b00f25a620e3","Type":"ContainerStarted","Data":"7277f820d7ebb77fbf8776ac5bff760329a1651358fcf17f05727cf609bdd606"}
Dec 05 12:44:12 crc kubenswrapper[4784]: I1205 12:44:12.318263 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7c44d66bd9-9tmnb" podUID="c9e507fa-3712-4c8d-92ae-1b66449ec42c" containerName="dnsmasq-dns" containerID="cri-o://034323f483c72ee9eea93d4812e263e95a25aa29b3eceb805072b35eb15513a5" gracePeriod=10
Dec 05 12:44:12 crc kubenswrapper[4784]: I1205 12:44:12.318423 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7c44d66bd9-9tmnb"
Dec 05 12:44:12 crc kubenswrapper[4784]: I1205 12:44:12.341073 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-sjfhv" podStartSLOduration=2.341011973 podStartE2EDuration="2.341011973s" podCreationTimestamp="2025-12-05 12:44:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:44:12.324703273 +0000 UTC m=+1131.744770108" watchObservedRunningTime="2025-12-05 12:44:12.341011973 +0000 UTC m=+1131.761078788"
Dec 05 12:44:12 crc kubenswrapper[4784]: I1205 12:44:12.362179 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=44.136427384 podStartE2EDuration="50.362153794s" podCreationTimestamp="2025-12-05 12:43:22 +0000 UTC" firstStartedPulling="2025-12-05 12:43:54.537606661 +0000 UTC m=+1113.957673486" lastFinishedPulling="2025-12-05 12:44:00.763333081 +0000 UTC m=+1120.183399896" observedRunningTime="2025-12-05 12:44:12.35531159 +0000 UTC m=+1131.775378425" watchObservedRunningTime="2025-12-05 12:44:12.362153794 +0000 UTC m=+1131.782220619"
Dec 05 12:44:12 crc kubenswrapper[4784]: I1205 12:44:12.405603 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7c44d66bd9-9tmnb" podStartSLOduration=-9223371982.44919 podStartE2EDuration="54.405584851s" podCreationTimestamp="2025-12-05 12:43:18 +0000 UTC" firstStartedPulling="2025-12-05 12:43:19.865688815 +0000 UTC m=+1079.285755630" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:44:12.404232279 +0000 UTC m=+1131.824299104" watchObservedRunningTime="2025-12-05 12:44:12.405584851 +0000 UTC m=+1131.825651666"
Dec 05 12:44:12 crc kubenswrapper[4784]: I1205 12:44:12.436015 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=46.392823976 podStartE2EDuration="52.435995782s" podCreationTimestamp="2025-12-05 12:43:20 +0000 UTC" firstStartedPulling="2025-12-05 12:43:54.720075173 +0000 UTC m=+1114.140141988" lastFinishedPulling="2025-12-05 12:44:00.763246959 +0000 UTC m=+1120.183313794" observedRunningTime="2025-12-05 12:44:12.429549751 +0000 UTC m=+1131.849616586" watchObservedRunningTime="2025-12-05 12:44:12.435995782 +0000 UTC m=+1131.856062587"
Dec 05 12:44:12 crc kubenswrapper[4784]: I1205 12:44:12.772836 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86d858c69c-66jcn"
Dec 05 12:44:12 crc kubenswrapper[4784]: I1205 12:44:12.914868 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kwg5t\" (UniqueName: \"kubernetes.io/projected/6bf4da93-86e3-498e-8fe9-e97688dc2479-kube-api-access-kwg5t\") pod \"6bf4da93-86e3-498e-8fe9-e97688dc2479\" (UID: \"6bf4da93-86e3-498e-8fe9-e97688dc2479\") "
Dec 05 12:44:12 crc kubenswrapper[4784]: I1205 12:44:12.915401 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6bf4da93-86e3-498e-8fe9-e97688dc2479-config\") pod \"6bf4da93-86e3-498e-8fe9-e97688dc2479\" (UID: \"6bf4da93-86e3-498e-8fe9-e97688dc2479\") "
Dec 05 12:44:12 crc kubenswrapper[4784]: I1205 12:44:12.915453 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6bf4da93-86e3-498e-8fe9-e97688dc2479-dns-svc\") pod \"6bf4da93-86e3-498e-8fe9-e97688dc2479\" (UID: \"6bf4da93-86e3-498e-8fe9-e97688dc2479\") "
Dec 05 12:44:12 crc kubenswrapper[4784]: I1205 12:44:12.921665 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6bf4da93-86e3-498e-8fe9-e97688dc2479-kube-api-access-kwg5t" (OuterVolumeSpecName: "kube-api-access-kwg5t") pod "6bf4da93-86e3-498e-8fe9-e97688dc2479" (UID: "6bf4da93-86e3-498e-8fe9-e97688dc2479"). InnerVolumeSpecName "kube-api-access-kwg5t". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:44:12 crc kubenswrapper[4784]: I1205 12:44:12.975393 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c44d66bd9-9tmnb"
Dec 05 12:44:13 crc kubenswrapper[4784]: I1205 12:44:13.014896 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6bf4da93-86e3-498e-8fe9-e97688dc2479-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6bf4da93-86e3-498e-8fe9-e97688dc2479" (UID: "6bf4da93-86e3-498e-8fe9-e97688dc2479"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:44:13 crc kubenswrapper[4784]: I1205 12:44:13.015060 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6bf4da93-86e3-498e-8fe9-e97688dc2479-config" (OuterVolumeSpecName: "config") pod "6bf4da93-86e3-498e-8fe9-e97688dc2479" (UID: "6bf4da93-86e3-498e-8fe9-e97688dc2479"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:44:13 crc kubenswrapper[4784]: I1205 12:44:13.017344 4784 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6bf4da93-86e3-498e-8fe9-e97688dc2479-config\") on node \"crc\" DevicePath \"\""
Dec 05 12:44:13 crc kubenswrapper[4784]: I1205 12:44:13.017373 4784 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6bf4da93-86e3-498e-8fe9-e97688dc2479-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 05 12:44:13 crc kubenswrapper[4784]: I1205 12:44:13.017385 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kwg5t\" (UniqueName: \"kubernetes.io/projected/6bf4da93-86e3-498e-8fe9-e97688dc2479-kube-api-access-kwg5t\") on node \"crc\" DevicePath \"\""
Dec 05 12:44:13 crc kubenswrapper[4784]: I1205 12:44:13.118837 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c9e507fa-3712-4c8d-92ae-1b66449ec42c-config\") pod \"c9e507fa-3712-4c8d-92ae-1b66449ec42c\" (UID: \"c9e507fa-3712-4c8d-92ae-1b66449ec42c\") "
Dec 05 12:44:13 crc kubenswrapper[4784]: I1205 12:44:13.118989 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2b6kg\" (UniqueName: \"kubernetes.io/projected/c9e507fa-3712-4c8d-92ae-1b66449ec42c-kube-api-access-2b6kg\") pod \"c9e507fa-3712-4c8d-92ae-1b66449ec42c\" (UID: \"c9e507fa-3712-4c8d-92ae-1b66449ec42c\") "
Dec 05 12:44:13 crc kubenswrapper[4784]: I1205 12:44:13.119060 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c9e507fa-3712-4c8d-92ae-1b66449ec42c-dns-svc\") pod \"c9e507fa-3712-4c8d-92ae-1b66449ec42c\" (UID: \"c9e507fa-3712-4c8d-92ae-1b66449ec42c\") "
Dec 05 12:44:13 crc kubenswrapper[4784]: I1205 12:44:13.122209 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9e507fa-3712-4c8d-92ae-1b66449ec42c-kube-api-access-2b6kg" (OuterVolumeSpecName: "kube-api-access-2b6kg") pod "c9e507fa-3712-4c8d-92ae-1b66449ec42c" (UID: "c9e507fa-3712-4c8d-92ae-1b66449ec42c"). InnerVolumeSpecName "kube-api-access-2b6kg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:44:13 crc kubenswrapper[4784]: I1205 12:44:13.161386 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c9e507fa-3712-4c8d-92ae-1b66449ec42c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c9e507fa-3712-4c8d-92ae-1b66449ec42c" (UID: "c9e507fa-3712-4c8d-92ae-1b66449ec42c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:44:13 crc kubenswrapper[4784]: I1205 12:44:13.162096 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c9e507fa-3712-4c8d-92ae-1b66449ec42c-config" (OuterVolumeSpecName: "config") pod "c9e507fa-3712-4c8d-92ae-1b66449ec42c" (UID: "c9e507fa-3712-4c8d-92ae-1b66449ec42c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:44:13 crc kubenswrapper[4784]: I1205 12:44:13.223114 4784 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c9e507fa-3712-4c8d-92ae-1b66449ec42c-config\") on node \"crc\" DevicePath \"\""
Dec 05 12:44:13 crc kubenswrapper[4784]: I1205 12:44:13.223153 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2b6kg\" (UniqueName: \"kubernetes.io/projected/c9e507fa-3712-4c8d-92ae-1b66449ec42c-kube-api-access-2b6kg\") on node \"crc\" DevicePath \"\""
Dec 05 12:44:13 crc kubenswrapper[4784]: I1205 12:44:13.223163 4784 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c9e507fa-3712-4c8d-92ae-1b66449ec42c-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 05 12:44:13 crc kubenswrapper[4784]: I1205 12:44:13.331400 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86d858c69c-66jcn"
Dec 05 12:44:13 crc kubenswrapper[4784]: I1205 12:44:13.331428 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86d858c69c-66jcn" event={"ID":"6bf4da93-86e3-498e-8fe9-e97688dc2479","Type":"ContainerDied","Data":"c30229b5f153552da7373a88a1e96038c42a8bbc82cfbcba52f0da483bdb3d96"}
Dec 05 12:44:13 crc kubenswrapper[4784]: I1205 12:44:13.331536 4784 scope.go:117] "RemoveContainer" containerID="6faa841d2513bce1ed947d1c08cad77255c967d2d8d1eceead2a6079b7a8c5b7"
Dec 05 12:44:13 crc kubenswrapper[4784]: I1205 12:44:13.336219 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-ccfb859df-n5vsw" event={"ID":"8c6c365b-59eb-4668-9f15-42dcc18f87bd","Type":"ContainerStarted","Data":"9d945cbd6f7ac6721b49c9ffdb5bd37281fdf713d0ce95a58009465c2e44b866"}
Dec 05 12:44:13 crc kubenswrapper[4784]: I1205 12:44:13.336309 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-ccfb859df-n5vsw"
Dec 05 12:44:13 crc kubenswrapper[4784]: I1205 12:44:13.341778 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55957bfdc7-v26wk" event={"ID":"8800798b-a78e-432a-a000-c8ad9645c67c","Type":"ContainerStarted","Data":"4c40c89f169ea0efaea2f1d6aff4144dc998c9d25bb0356c64800a2edcc7b7ff"}
Dec 05 12:44:13 crc kubenswrapper[4784]: I1205 12:44:13.342617 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-55957bfdc7-v26wk"
Dec 05 12:44:13 crc kubenswrapper[4784]: I1205 12:44:13.346624 4784 generic.go:334] "Generic (PLEG): container finished" podID="c9e507fa-3712-4c8d-92ae-1b66449ec42c" containerID="034323f483c72ee9eea93d4812e263e95a25aa29b3eceb805072b35eb15513a5" exitCode=0
Dec 05 12:44:13 crc kubenswrapper[4784]: I1205 12:44:13.346712 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c44d66bd9-9tmnb" event={"ID":"c9e507fa-3712-4c8d-92ae-1b66449ec42c","Type":"ContainerDied","Data":"034323f483c72ee9eea93d4812e263e95a25aa29b3eceb805072b35eb15513a5"}
Dec 05 12:44:13 crc kubenswrapper[4784]: I1205 12:44:13.346861 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c44d66bd9-9tmnb" event={"ID":"c9e507fa-3712-4c8d-92ae-1b66449ec42c","Type":"ContainerDied","Data":"6cbf8ffed71c0a618bfae84d3b3706c03918773712d26445ab17969afe84c2ac"}
Dec 05 12:44:13 crc kubenswrapper[4784]: I1205 12:44:13.346761 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c44d66bd9-9tmnb"
Dec 05 12:44:13 crc kubenswrapper[4784]: I1205 12:44:13.349373 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"961dcf80-20f3-48f5-818b-2c497ce58e01","Type":"ContainerStarted","Data":"c9d4fb942dc8ba4e11f130ea9b936b9c49e6493b938843e04dc5c1db9e871729"}
Dec 05 12:44:13 crc kubenswrapper[4784]: I1205 12:44:13.349532 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0"
Dec 05 12:44:13 crc kubenswrapper[4784]: I1205 12:44:13.355823 4784 generic.go:334] "Generic (PLEG): container finished" podID="80b02221-f73a-4535-b2d9-c203e5de2061" containerID="f48a9ccf406483f0a446cb52c5e2946429b81856ad61cf5b3146d8b0adda3e99" exitCode=0
Dec 05 12:44:13 crc kubenswrapper[4784]: I1205 12:44:13.356743 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"80b02221-f73a-4535-b2d9-c203e5de2061","Type":"ContainerDied","Data":"f48a9ccf406483f0a446cb52c5e2946429b81856ad61cf5b3146d8b0adda3e99"}
Dec 05 12:44:13 crc kubenswrapper[4784]: I1205 12:44:13.360611 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86d858c69c-66jcn"]
Dec 05 12:44:13 crc kubenswrapper[4784]: I1205 12:44:13.363543 4784 scope.go:117] "RemoveContainer" containerID="95ee65bea06e4234464eb03a027bcda18e5be9d73fdc9c8ee2238789a14c9926"
Dec 05 12:44:13 crc kubenswrapper[4784]: I1205 12:44:13.367481 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-86d858c69c-66jcn"]
Dec 05 12:44:13 crc kubenswrapper[4784]: I1205 12:44:13.381249 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-ccfb859df-n5vsw" podStartSLOduration=3.381227995 podStartE2EDuration="3.381227995s" podCreationTimestamp="2025-12-05 12:44:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:44:13.377009352 +0000 UTC m=+1132.797076167" watchObservedRunningTime="2025-12-05 12:44:13.381227995 +0000 UTC m=+1132.801294810"
Dec 05 12:44:13 crc kubenswrapper[4784]: I1205 12:44:13.393148 4784 scope.go:117] "RemoveContainer" containerID="034323f483c72ee9eea93d4812e263e95a25aa29b3eceb805072b35eb15513a5"
Dec 05 12:44:13 crc kubenswrapper[4784]: I1205 12:44:13.402599 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.189038423 podStartE2EDuration="3.402578882s" podCreationTimestamp="2025-12-05 12:44:10 +0000 UTC" firstStartedPulling="2025-12-05 12:44:11.592997064 +0000 UTC m=+1131.013063869" lastFinishedPulling="2025-12-05 12:44:12.806537513 +0000 UTC m=+1132.226604328" observedRunningTime="2025-12-05 12:44:13.397978118 +0000 UTC m=+1132.818044933" watchObservedRunningTime="2025-12-05 12:44:13.402578882 +0000 UTC m=+1132.822645707"
Dec 05 12:44:13 crc kubenswrapper[4784]: I1205 12:44:13.431900 4784 scope.go:117] "RemoveContainer" containerID="3b543bf7bdef0d955c5270f4a61a30853c827ab21670a240222b3364d401d80a"
Dec 05 12:44:13 crc kubenswrapper[4784]: I1205 12:44:13.448072 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-55957bfdc7-v26wk" podStartSLOduration=3.448051623 podStartE2EDuration="3.448051623s" podCreationTimestamp="2025-12-05 12:44:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:44:13.44637073 +0000 UTC m=+1132.866437545" watchObservedRunningTime="2025-12-05 12:44:13.448051623 +0000 UTC m=+1132.868118428"
Dec 05 12:44:13 crc kubenswrapper[4784]: I1205 12:44:13.450489 4784 scope.go:117] "RemoveContainer" containerID="034323f483c72ee9eea93d4812e263e95a25aa29b3eceb805072b35eb15513a5"
Dec 05 12:44:13 crc kubenswrapper[4784]: E1205 12:44:13.450993 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"034323f483c72ee9eea93d4812e263e95a25aa29b3eceb805072b35eb15513a5\": container with ID starting with 034323f483c72ee9eea93d4812e263e95a25aa29b3eceb805072b35eb15513a5 not found: ID does not exist" containerID="034323f483c72ee9eea93d4812e263e95a25aa29b3eceb805072b35eb15513a5"
Dec 05 12:44:13 crc kubenswrapper[4784]: I1205 12:44:13.451046 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"034323f483c72ee9eea93d4812e263e95a25aa29b3eceb805072b35eb15513a5"} err="failed to get container status \"034323f483c72ee9eea93d4812e263e95a25aa29b3eceb805072b35eb15513a5\": rpc error: code = NotFound desc = could not find container \"034323f483c72ee9eea93d4812e263e95a25aa29b3eceb805072b35eb15513a5\": container with ID starting with 034323f483c72ee9eea93d4812e263e95a25aa29b3eceb805072b35eb15513a5 not found: ID does not exist"
Dec 05 12:44:13 crc kubenswrapper[4784]: I1205 12:44:13.451081 4784 scope.go:117] "RemoveContainer" containerID="3b543bf7bdef0d955c5270f4a61a30853c827ab21670a240222b3364d401d80a"
Dec 05 12:44:13 crc kubenswrapper[4784]: E1205 12:44:13.451429 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b543bf7bdef0d955c5270f4a61a30853c827ab21670a240222b3364d401d80a\": container with ID starting with 3b543bf7bdef0d955c5270f4a61a30853c827ab21670a240222b3364d401d80a not found: ID does not exist" containerID="3b543bf7bdef0d955c5270f4a61a30853c827ab21670a240222b3364d401d80a"
Dec 05 12:44:13 crc kubenswrapper[4784]: I1205 12:44:13.451488 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b543bf7bdef0d955c5270f4a61a30853c827ab21670a240222b3364d401d80a"} err="failed to get container status \"3b543bf7bdef0d955c5270f4a61a30853c827ab21670a240222b3364d401d80a\": rpc error: code = NotFound desc = could not find container \"3b543bf7bdef0d955c5270f4a61a30853c827ab21670a240222b3364d401d80a\": container with ID starting with 3b543bf7bdef0d955c5270f4a61a30853c827ab21670a240222b3364d401d80a not found: ID does not exist"
Dec 05 12:44:13 crc kubenswrapper[4784]: I1205 12:44:13.470278 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7c44d66bd9-9tmnb"]
Dec 05 12:44:13 crc kubenswrapper[4784]: I1205 12:44:13.477217 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7c44d66bd9-9tmnb"]
Dec 05 12:44:13 crc kubenswrapper[4784]: I1205 12:44:13.900764 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0"
Dec 05 12:44:13 crc kubenswrapper[4784]: I1205 12:44:13.900982 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0"
Dec 05 12:44:14 crc kubenswrapper[4784]: I1205 12:44:14.390239 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"961dcf80-20f3-48f5-818b-2c497ce58e01","Type":"ContainerStarted","Data":"8f12b6dfd026094c6091bab5b5df251216c3e0bb5990922cf031d60b2ac07441"}
Dec 05 12:44:15 crc kubenswrapper[4784]: I1205 12:44:15.010822 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6bf4da93-86e3-498e-8fe9-e97688dc2479" path="/var/lib/kubelet/pods/6bf4da93-86e3-498e-8fe9-e97688dc2479/volumes"
Dec 05 12:44:15 crc kubenswrapper[4784]: I1205 12:44:15.012213 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c9e507fa-3712-4c8d-92ae-1b66449ec42c" path="/var/lib/kubelet/pods/c9e507fa-3712-4c8d-92ae-1b66449ec42c/volumes"
Dec 05 12:44:16 crc kubenswrapper[4784]: I1205 12:44:16.026414 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55957bfdc7-v26wk"]
Dec 05 12:44:16 crc kubenswrapper[4784]: I1205 12:44:16.026617 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-55957bfdc7-v26wk" podUID="8800798b-a78e-432a-a000-c8ad9645c67c" containerName="dnsmasq-dns" containerID="cri-o://4c40c89f169ea0efaea2f1d6aff4144dc998c9d25bb0356c64800a2edcc7b7ff" gracePeriod=10
Dec 05 12:44:16 crc kubenswrapper[4784]: I1205 12:44:16.064839 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5dd44fd6df-gsqs9"]
Dec 05 12:44:16 crc kubenswrapper[4784]: E1205 12:44:16.065373 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6bf4da93-86e3-498e-8fe9-e97688dc2479" containerName="dnsmasq-dns"
Dec 05 12:44:16 crc kubenswrapper[4784]: I1205 12:44:16.065391 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="6bf4da93-86e3-498e-8fe9-e97688dc2479" containerName="dnsmasq-dns"
Dec 05 12:44:16 crc kubenswrapper[4784]: E1205 12:44:16.065409 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6bf4da93-86e3-498e-8fe9-e97688dc2479" containerName="init"
Dec 05 12:44:16 crc kubenswrapper[4784]: I1205 12:44:16.065415 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="6bf4da93-86e3-498e-8fe9-e97688dc2479" containerName="init"
Dec 05 12:44:16 crc kubenswrapper[4784]: E1205 12:44:16.065435 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9e507fa-3712-4c8d-92ae-1b66449ec42c" containerName="init"
Dec 05 12:44:16 crc kubenswrapper[4784]: I1205 12:44:16.065443 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9e507fa-3712-4c8d-92ae-1b66449ec42c" containerName="init"
Dec 05 12:44:16 crc kubenswrapper[4784]: E1205 12:44:16.065458 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9e507fa-3712-4c8d-92ae-1b66449ec42c" containerName="dnsmasq-dns"
Dec 05 12:44:16 crc kubenswrapper[4784]: I1205 12:44:16.065463 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9e507fa-3712-4c8d-92ae-1b66449ec42c" containerName="dnsmasq-dns"
Dec 05 12:44:16 crc kubenswrapper[4784]: I1205 12:44:16.065619 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9e507fa-3712-4c8d-92ae-1b66449ec42c" containerName="dnsmasq-dns"
Dec 05 12:44:16 crc kubenswrapper[4784]: I1205 12:44:16.065644 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="6bf4da93-86e3-498e-8fe9-e97688dc2479" containerName="dnsmasq-dns"
Dec 05 12:44:16 crc kubenswrapper[4784]: I1205 12:44:16.066476 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5dd44fd6df-gsqs9"
Dec 05 12:44:16 crc kubenswrapper[4784]: I1205 12:44:16.073525 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0"
Dec 05 12:44:16 crc kubenswrapper[4784]: I1205 12:44:16.080612 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5dd44fd6df-gsqs9"]
Dec 05 12:44:16 crc kubenswrapper[4784]: I1205 12:44:16.183087 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d-ovsdbserver-nb\") pod \"dnsmasq-dns-5dd44fd6df-gsqs9\" (UID: \"bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d\") " pod="openstack/dnsmasq-dns-5dd44fd6df-gsqs9"
Dec 05 12:44:16 crc kubenswrapper[4784]: I1205 12:44:16.183142 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d-config\") pod \"dnsmasq-dns-5dd44fd6df-gsqs9\" (UID: \"bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d\") " pod="openstack/dnsmasq-dns-5dd44fd6df-gsqs9"
Dec 05 12:44:16 crc kubenswrapper[4784]: I1205 12:44:16.183181 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d-dns-svc\") pod \"dnsmasq-dns-5dd44fd6df-gsqs9\" (UID: \"bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d\") " pod="openstack/dnsmasq-dns-5dd44fd6df-gsqs9"
Dec 05 12:44:16 crc kubenswrapper[4784]: I1205 12:44:16.183239 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d-ovsdbserver-sb\") pod \"dnsmasq-dns-5dd44fd6df-gsqs9\" (UID: \"bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d\") " pod="openstack/dnsmasq-dns-5dd44fd6df-gsqs9"
Dec 05 12:44:16 crc kubenswrapper[4784]: I1205 12:44:16.183262 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6gsf4\" (UniqueName: \"kubernetes.io/projected/bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d-kube-api-access-6gsf4\") pod \"dnsmasq-dns-5dd44fd6df-gsqs9\" (UID: \"bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d\") " pod="openstack/dnsmasq-dns-5dd44fd6df-gsqs9"
Dec 05 12:44:16 crc kubenswrapper[4784]: I1205 12:44:16.284576 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d-ovsdbserver-sb\") pod \"dnsmasq-dns-5dd44fd6df-gsqs9\" (UID: \"bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d\") " pod="openstack/dnsmasq-dns-5dd44fd6df-gsqs9"
Dec 05 12:44:16 crc kubenswrapper[4784]: I1205 12:44:16.284628 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6gsf4\" (UniqueName: \"kubernetes.io/projected/bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d-kube-api-access-6gsf4\") pod \"dnsmasq-dns-5dd44fd6df-gsqs9\" (UID: \"bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d\") " pod="openstack/dnsmasq-dns-5dd44fd6df-gsqs9"
Dec 05 12:44:16 crc kubenswrapper[4784]: I1205 12:44:16.284742 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d-ovsdbserver-nb\") pod \"dnsmasq-dns-5dd44fd6df-gsqs9\" (UID: \"bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d\") " pod="openstack/dnsmasq-dns-5dd44fd6df-gsqs9"
Dec 05 12:44:16 crc kubenswrapper[4784]: I1205 12:44:16.284774 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d-config\") pod \"dnsmasq-dns-5dd44fd6df-gsqs9\" (UID: \"bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d\") " pod="openstack/dnsmasq-dns-5dd44fd6df-gsqs9"
Dec 05 12:44:16 crc kubenswrapper[4784]: I1205 12:44:16.284819 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d-dns-svc\") pod \"dnsmasq-dns-5dd44fd6df-gsqs9\" (UID: \"bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d\") " pod="openstack/dnsmasq-dns-5dd44fd6df-gsqs9"
Dec 05 12:44:16 crc kubenswrapper[4784]: I1205 12:44:16.285577 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d-ovsdbserver-sb\") pod \"dnsmasq-dns-5dd44fd6df-gsqs9\" (UID: \"bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d\") " pod="openstack/dnsmasq-dns-5dd44fd6df-gsqs9"
Dec 05 12:44:16 crc kubenswrapper[4784]: I1205 12:44:16.285812 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d-config\") pod \"dnsmasq-dns-5dd44fd6df-gsqs9\" (UID: \"bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d\") " pod="openstack/dnsmasq-dns-5dd44fd6df-gsqs9"
Dec 05 12:44:16 crc kubenswrapper[4784]: I1205 12:44:16.285813 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d-dns-svc\") pod \"dnsmasq-dns-5dd44fd6df-gsqs9\" (UID: \"bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d\") " pod="openstack/dnsmasq-dns-5dd44fd6df-gsqs9"
Dec 05 12:44:16 crc kubenswrapper[4784]: I1205 12:44:16.286527 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d-ovsdbserver-nb\") pod \"dnsmasq-dns-5dd44fd6df-gsqs9\" (UID: \"bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d\") " pod="openstack/dnsmasq-dns-5dd44fd6df-gsqs9"
Dec 05 12:44:16 crc kubenswrapper[4784]: I1205 12:44:16.303979 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6gsf4\" (UniqueName: \"kubernetes.io/projected/bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d-kube-api-access-6gsf4\") pod \"dnsmasq-dns-5dd44fd6df-gsqs9\" (UID: \"bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d\") " pod="openstack/dnsmasq-dns-5dd44fd6df-gsqs9"
Dec 05 12:44:16 crc kubenswrapper[4784]: I1205 12:44:16.381116 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5dd44fd6df-gsqs9"
Dec 05 12:44:16 crc kubenswrapper[4784]: I1205 12:44:16.430480 4784 generic.go:334] "Generic (PLEG): container finished" podID="8800798b-a78e-432a-a000-c8ad9645c67c" containerID="4c40c89f169ea0efaea2f1d6aff4144dc998c9d25bb0356c64800a2edcc7b7ff" exitCode=0
Dec 05 12:44:16 crc kubenswrapper[4784]: I1205 12:44:16.430527 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55957bfdc7-v26wk" event={"ID":"8800798b-a78e-432a-a000-c8ad9645c67c","Type":"ContainerDied","Data":"4c40c89f169ea0efaea2f1d6aff4144dc998c9d25bb0356c64800a2edcc7b7ff"}
Dec 05 12:44:16 crc kubenswrapper[4784]: I1205 12:44:16.674311 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5dd44fd6df-gsqs9"]
Dec 05 12:44:16 crc kubenswrapper[4784]: W1205 12:44:16.680065 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbb48c645_1ac8_450e_9cc4_c8ff59d3fb0d.slice/crio-b3f67aa82ef88bb5ec297d2ee4b30ced67c11c89059264421c84863f5f2e9e45 WatchSource:0}: Error finding container b3f67aa82ef88bb5ec297d2ee4b30ced67c11c89059264421c84863f5f2e9e45: Status 404 returned error can't find the container with id b3f67aa82ef88bb5ec297d2ee4b30ced67c11c89059264421c84863f5f2e9e45
Dec 05 12:44:16 crc kubenswrapper[4784]: I1205 12:44:16.887093 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55957bfdc7-v26wk"
Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.004908 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8800798b-a78e-432a-a000-c8ad9645c67c-ovsdbserver-nb\") pod \"8800798b-a78e-432a-a000-c8ad9645c67c\" (UID: \"8800798b-a78e-432a-a000-c8ad9645c67c\") "
Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.004994 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8800798b-a78e-432a-a000-c8ad9645c67c-config\") pod \"8800798b-a78e-432a-a000-c8ad9645c67c\" (UID: \"8800798b-a78e-432a-a000-c8ad9645c67c\") "
Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.005025 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-54cbx\" (UniqueName: \"kubernetes.io/projected/8800798b-a78e-432a-a000-c8ad9645c67c-kube-api-access-54cbx\") pod \"8800798b-a78e-432a-a000-c8ad9645c67c\" (UID: \"8800798b-a78e-432a-a000-c8ad9645c67c\") "
Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.005334 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8800798b-a78e-432a-a000-c8ad9645c67c-dns-svc\") pod \"8800798b-a78e-432a-a000-c8ad9645c67c\" (UID: \"8800798b-a78e-432a-a000-c8ad9645c67c\") "
Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.005371 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8800798b-a78e-432a-a000-c8ad9645c67c-ovsdbserver-sb\") pod \"8800798b-a78e-432a-a000-c8ad9645c67c\" (UID: \"8800798b-a78e-432a-a000-c8ad9645c67c\") "
Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.015421 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8800798b-a78e-432a-a000-c8ad9645c67c-kube-api-access-54cbx" (OuterVolumeSpecName: "kube-api-access-54cbx") pod "8800798b-a78e-432a-a000-c8ad9645c67c" (UID: "8800798b-a78e-432a-a000-c8ad9645c67c"). InnerVolumeSpecName "kube-api-access-54cbx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.055335 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8800798b-a78e-432a-a000-c8ad9645c67c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8800798b-a78e-432a-a000-c8ad9645c67c" (UID: "8800798b-a78e-432a-a000-c8ad9645c67c"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.055401 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8800798b-a78e-432a-a000-c8ad9645c67c-config" (OuterVolumeSpecName: "config") pod "8800798b-a78e-432a-a000-c8ad9645c67c" (UID: "8800798b-a78e-432a-a000-c8ad9645c67c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.071407 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8800798b-a78e-432a-a000-c8ad9645c67c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8800798b-a78e-432a-a000-c8ad9645c67c" (UID: "8800798b-a78e-432a-a000-c8ad9645c67c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.073483 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8800798b-a78e-432a-a000-c8ad9645c67c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8800798b-a78e-432a-a000-c8ad9645c67c" (UID: "8800798b-a78e-432a-a000-c8ad9645c67c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.108462 4784 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8800798b-a78e-432a-a000-c8ad9645c67c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.108502 4784 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8800798b-a78e-432a-a000-c8ad9645c67c-config\") on node \"crc\" DevicePath \"\""
Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.108516 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-54cbx\" (UniqueName: \"kubernetes.io/projected/8800798b-a78e-432a-a000-c8ad9645c67c-kube-api-access-54cbx\") on node \"crc\" DevicePath \"\""
Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.108531 4784 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8800798b-a78e-432a-a000-c8ad9645c67c-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.108546 4784 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8800798b-a78e-432a-a000-c8ad9645c67c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.170620 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"]
Dec 05 12:44:17 crc kubenswrapper[4784]: E1205 12:44:17.170968 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8800798b-a78e-432a-a000-c8ad9645c67c" containerName="init"
Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.170987 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="8800798b-a78e-432a-a000-c8ad9645c67c" containerName="init"
Dec 05 12:44:17 crc kubenswrapper[4784]: E1205 12:44:17.170999 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8800798b-a78e-432a-a000-c8ad9645c67c" containerName="dnsmasq-dns"
Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.171006 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="8800798b-a78e-432a-a000-c8ad9645c67c" containerName="dnsmasq-dns"
Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.171201 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="8800798b-a78e-432a-a000-c8ad9645c67c" containerName="dnsmasq-dns"
Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.175882 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.178335 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-tj58g" Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.178341 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.178620 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.179275 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.190845 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.311469 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"swift-storage-0\" (UID: \"f34e93a8-02d9-44ef-a18e-13ce24c3f9a6\") " pod="openstack/swift-storage-0" Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.311555 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/f34e93a8-02d9-44ef-a18e-13ce24c3f9a6-lock\") pod \"swift-storage-0\" (UID: \"f34e93a8-02d9-44ef-a18e-13ce24c3f9a6\") " pod="openstack/swift-storage-0" Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.311602 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f34e93a8-02d9-44ef-a18e-13ce24c3f9a6-etc-swift\") pod \"swift-storage-0\" (UID: \"f34e93a8-02d9-44ef-a18e-13ce24c3f9a6\") " pod="openstack/swift-storage-0" Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.311647 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/f34e93a8-02d9-44ef-a18e-13ce24c3f9a6-cache\") pod \"swift-storage-0\" (UID: \"f34e93a8-02d9-44ef-a18e-13ce24c3f9a6\") " pod="openstack/swift-storage-0" Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.311685 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f952p\" (UniqueName: \"kubernetes.io/projected/f34e93a8-02d9-44ef-a18e-13ce24c3f9a6-kube-api-access-f952p\") pod \"swift-storage-0\" (UID: \"f34e93a8-02d9-44ef-a18e-13ce24c3f9a6\") " pod="openstack/swift-storage-0" Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.413629 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f34e93a8-02d9-44ef-a18e-13ce24c3f9a6-etc-swift\") pod \"swift-storage-0\" (UID: \"f34e93a8-02d9-44ef-a18e-13ce24c3f9a6\") " pod="openstack/swift-storage-0" Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.413725 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/f34e93a8-02d9-44ef-a18e-13ce24c3f9a6-cache\") pod \"swift-storage-0\" (UID: \"f34e93a8-02d9-44ef-a18e-13ce24c3f9a6\") " pod="openstack/swift-storage-0" Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.413797 4784 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"kube-api-access-f952p\" (UniqueName: \"kubernetes.io/projected/f34e93a8-02d9-44ef-a18e-13ce24c3f9a6-kube-api-access-f952p\") pod \"swift-storage-0\" (UID: \"f34e93a8-02d9-44ef-a18e-13ce24c3f9a6\") " pod="openstack/swift-storage-0" Dec 05 12:44:17 crc kubenswrapper[4784]: E1205 12:44:17.413886 4784 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 12:44:17 crc kubenswrapper[4784]: E1205 12:44:17.413922 4784 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 12:44:17 crc kubenswrapper[4784]: E1205 12:44:17.413997 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/f34e93a8-02d9-44ef-a18e-13ce24c3f9a6-etc-swift podName:f34e93a8-02d9-44ef-a18e-13ce24c3f9a6 nodeName:}" failed. No retries permitted until 2025-12-05 12:44:17.913970875 +0000 UTC m=+1137.334037730 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/f34e93a8-02d9-44ef-a18e-13ce24c3f9a6-etc-swift") pod "swift-storage-0" (UID: "f34e93a8-02d9-44ef-a18e-13ce24c3f9a6") : configmap "swift-ring-files" not found Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.413901 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"swift-storage-0\" (UID: \"f34e93a8-02d9-44ef-a18e-13ce24c3f9a6\") " pod="openstack/swift-storage-0" Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.414169 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/f34e93a8-02d9-44ef-a18e-13ce24c3f9a6-lock\") pod \"swift-storage-0\" (UID: \"f34e93a8-02d9-44ef-a18e-13ce24c3f9a6\") " pod="openstack/swift-storage-0" Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.414281 4784 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"swift-storage-0\" (UID: \"f34e93a8-02d9-44ef-a18e-13ce24c3f9a6\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/swift-storage-0" Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.414736 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/f34e93a8-02d9-44ef-a18e-13ce24c3f9a6-cache\") pod \"swift-storage-0\" (UID: \"f34e93a8-02d9-44ef-a18e-13ce24c3f9a6\") " pod="openstack/swift-storage-0" Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.415055 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/f34e93a8-02d9-44ef-a18e-13ce24c3f9a6-lock\") pod \"swift-storage-0\" (UID: \"f34e93a8-02d9-44ef-a18e-13ce24c3f9a6\") " pod="openstack/swift-storage-0" Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.440110 4784 generic.go:334] "Generic (PLEG): container finished" podID="bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d" containerID="b7b44418f91ea98db4f90c0eb3f4c6c045d22c887bac95d723b9b1e16849661e" exitCode=0 Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.440218 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5dd44fd6df-gsqs9" 
event={"ID":"bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d","Type":"ContainerDied","Data":"b7b44418f91ea98db4f90c0eb3f4c6c045d22c887bac95d723b9b1e16849661e"} Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.440246 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5dd44fd6df-gsqs9" event={"ID":"bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d","Type":"ContainerStarted","Data":"b3f67aa82ef88bb5ec297d2ee4b30ced67c11c89059264421c84863f5f2e9e45"} Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.445926 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55957bfdc7-v26wk" event={"ID":"8800798b-a78e-432a-a000-c8ad9645c67c","Type":"ContainerDied","Data":"bee4a2f3e94904539caa93d4dfe654e1a75a6278cf8b6ce33850c86cd5938051"} Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.446300 4784 scope.go:117] "RemoveContainer" containerID="4c40c89f169ea0efaea2f1d6aff4144dc998c9d25bb0356c64800a2edcc7b7ff" Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.446140 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55957bfdc7-v26wk" Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.452960 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f952p\" (UniqueName: \"kubernetes.io/projected/f34e93a8-02d9-44ef-a18e-13ce24c3f9a6-kube-api-access-f952p\") pod \"swift-storage-0\" (UID: \"f34e93a8-02d9-44ef-a18e-13ce24c3f9a6\") " pod="openstack/swift-storage-0" Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.460417 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"swift-storage-0\" (UID: \"f34e93a8-02d9-44ef-a18e-13ce24c3f9a6\") " pod="openstack/swift-storage-0" Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.532661 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55957bfdc7-v26wk"] Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.541135 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-55957bfdc7-v26wk"] Dec 05 12:44:17 crc kubenswrapper[4784]: I1205 12:44:17.923726 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f34e93a8-02d9-44ef-a18e-13ce24c3f9a6-etc-swift\") pod \"swift-storage-0\" (UID: \"f34e93a8-02d9-44ef-a18e-13ce24c3f9a6\") " pod="openstack/swift-storage-0" Dec 05 12:44:17 crc kubenswrapper[4784]: E1205 12:44:17.924013 4784 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 12:44:17 crc kubenswrapper[4784]: E1205 12:44:17.924052 4784 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 12:44:17 crc kubenswrapper[4784]: E1205 12:44:17.924129 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/f34e93a8-02d9-44ef-a18e-13ce24c3f9a6-etc-swift podName:f34e93a8-02d9-44ef-a18e-13ce24c3f9a6 nodeName:}" failed. No retries permitted until 2025-12-05 12:44:18.924107209 +0000 UTC m=+1138.344174044 (durationBeforeRetry 1s). 
Dec 05 12:44:18 crc kubenswrapper[4784]: I1205 12:44:18.035924 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0"
Dec 05 12:44:18 crc kubenswrapper[4784]: I1205 12:44:18.187499 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0"
Dec 05 12:44:18 crc kubenswrapper[4784]: I1205 12:44:18.938570 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f34e93a8-02d9-44ef-a18e-13ce24c3f9a6-etc-swift\") pod \"swift-storage-0\" (UID: \"f34e93a8-02d9-44ef-a18e-13ce24c3f9a6\") " pod="openstack/swift-storage-0"
Dec 05 12:44:18 crc kubenswrapper[4784]: E1205 12:44:18.938779 4784 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Dec 05 12:44:18 crc kubenswrapper[4784]: E1205 12:44:18.939129 4784 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Dec 05 12:44:18 crc kubenswrapper[4784]: E1205 12:44:18.939215 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/f34e93a8-02d9-44ef-a18e-13ce24c3f9a6-etc-swift podName:f34e93a8-02d9-44ef-a18e-13ce24c3f9a6 nodeName:}" failed. No retries permitted until 2025-12-05 12:44:20.939176773 +0000 UTC m=+1140.359243598 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/f34e93a8-02d9-44ef-a18e-13ce24c3f9a6-etc-swift") pod "swift-storage-0" (UID: "f34e93a8-02d9-44ef-a18e-13ce24c3f9a6") : configmap "swift-ring-files" not found
Dec 05 12:44:19 crc kubenswrapper[4784]: I1205 12:44:19.012612 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8800798b-a78e-432a-a000-c8ad9645c67c" path="/var/lib/kubelet/pods/8800798b-a78e-432a-a000-c8ad9645c67c/volumes"
Dec 05 12:44:20 crc kubenswrapper[4784]: I1205 12:44:20.013895 4784 scope.go:117] "RemoveContainer" containerID="0ea7a549f06e209d37b14d8e89ee0ab1b2bb08a11a4a2b2111f8ba5fd79ccaf9"
Dec 05 12:44:20 crc kubenswrapper[4784]: I1205 12:44:20.472572 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5dd44fd6df-gsqs9" event={"ID":"bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d","Type":"ContainerStarted","Data":"e3e8edd616699567cb85196ebc38758d923e6b64e26511be84467d941ece2f8b"}
Dec 05 12:44:20 crc kubenswrapper[4784]: I1205 12:44:20.473505 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5dd44fd6df-gsqs9"
Dec 05 12:44:20 crc kubenswrapper[4784]: I1205 12:44:20.474914 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"80b02221-f73a-4535-b2d9-c203e5de2061","Type":"ContainerStarted","Data":"b6ea72f4c0636fa6fcf55d4829102b8db5d000b56c5326bf0e7cb7adb514c10c"}
Dec 05 12:44:20 crc kubenswrapper[4784]: I1205 12:44:20.498438 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5dd44fd6df-gsqs9" podStartSLOduration=4.498418286 podStartE2EDuration="4.498418286s" podCreationTimestamp="2025-12-05 12:44:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 
+0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:44:20.494531005 +0000 UTC m=+1139.914597830" watchObservedRunningTime="2025-12-05 12:44:20.498418286 +0000 UTC m=+1139.918485111" Dec 05 12:44:20 crc kubenswrapper[4784]: I1205 12:44:20.857648 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-ccfb859df-n5vsw" Dec 05 12:44:20 crc kubenswrapper[4784]: I1205 12:44:20.976808 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f34e93a8-02d9-44ef-a18e-13ce24c3f9a6-etc-swift\") pod \"swift-storage-0\" (UID: \"f34e93a8-02d9-44ef-a18e-13ce24c3f9a6\") " pod="openstack/swift-storage-0" Dec 05 12:44:20 crc kubenswrapper[4784]: E1205 12:44:20.977047 4784 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 12:44:20 crc kubenswrapper[4784]: E1205 12:44:20.977077 4784 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 12:44:20 crc kubenswrapper[4784]: E1205 12:44:20.977134 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/f34e93a8-02d9-44ef-a18e-13ce24c3f9a6-etc-swift podName:f34e93a8-02d9-44ef-a18e-13ce24c3f9a6 nodeName:}" failed. No retries permitted until 2025-12-05 12:44:24.977114379 +0000 UTC m=+1144.397181294 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/f34e93a8-02d9-44ef-a18e-13ce24c3f9a6-etc-swift") pod "swift-storage-0" (UID: "f34e93a8-02d9-44ef-a18e-13ce24c3f9a6") : configmap "swift-ring-files" not found Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.148979 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-zbthx"] Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.150063 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-zbthx" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.158553 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.158903 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.160425 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.181705 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-5js9g"] Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.183100 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-5js9g" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.206414 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-5js9g"] Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.210720 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-zbthx"] Dec 05 12:44:21 crc kubenswrapper[4784]: E1205 12:44:21.211400 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle dispersionconf etc-swift kube-api-access-q47fr ring-data-devices scripts swiftconf], unattached volumes=[], failed to process volumes=[combined-ca-bundle dispersionconf etc-swift kube-api-access-q47fr ring-data-devices scripts swiftconf]: context canceled" pod="openstack/swift-ring-rebalance-zbthx" podUID="65fddfcc-6286-4146-8f8b-bb9e50ba2bee" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.258898 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-zbthx"] Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.281494 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de71f05a-e844-4d80-bd5b-2e4169a624c4-combined-ca-bundle\") pod \"swift-ring-rebalance-5js9g\" (UID: \"de71f05a-e844-4d80-bd5b-2e4169a624c4\") " pod="openstack/swift-ring-rebalance-5js9g" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.281754 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/de71f05a-e844-4d80-bd5b-2e4169a624c4-dispersionconf\") pod \"swift-ring-rebalance-5js9g\" (UID: \"de71f05a-e844-4d80-bd5b-2e4169a624c4\") " pod="openstack/swift-ring-rebalance-5js9g" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.281783 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/de71f05a-e844-4d80-bd5b-2e4169a624c4-scripts\") pod \"swift-ring-rebalance-5js9g\" (UID: \"de71f05a-e844-4d80-bd5b-2e4169a624c4\") " pod="openstack/swift-ring-rebalance-5js9g" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.281841 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/65fddfcc-6286-4146-8f8b-bb9e50ba2bee-dispersionconf\") pod \"swift-ring-rebalance-zbthx\" (UID: \"65fddfcc-6286-4146-8f8b-bb9e50ba2bee\") " pod="openstack/swift-ring-rebalance-zbthx" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.281889 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/65fddfcc-6286-4146-8f8b-bb9e50ba2bee-ring-data-devices\") pod \"swift-ring-rebalance-zbthx\" (UID: \"65fddfcc-6286-4146-8f8b-bb9e50ba2bee\") " pod="openstack/swift-ring-rebalance-zbthx" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.281906 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/de71f05a-e844-4d80-bd5b-2e4169a624c4-swiftconf\") pod \"swift-ring-rebalance-5js9g\" (UID: \"de71f05a-e844-4d80-bd5b-2e4169a624c4\") " pod="openstack/swift-ring-rebalance-5js9g" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.281945 4784 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q47fr\" (UniqueName: \"kubernetes.io/projected/65fddfcc-6286-4146-8f8b-bb9e50ba2bee-kube-api-access-q47fr\") pod \"swift-ring-rebalance-zbthx\" (UID: \"65fddfcc-6286-4146-8f8b-bb9e50ba2bee\") " pod="openstack/swift-ring-rebalance-zbthx" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.281963 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/65fddfcc-6286-4146-8f8b-bb9e50ba2bee-etc-swift\") pod \"swift-ring-rebalance-zbthx\" (UID: \"65fddfcc-6286-4146-8f8b-bb9e50ba2bee\") " pod="openstack/swift-ring-rebalance-zbthx" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.281988 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65fddfcc-6286-4146-8f8b-bb9e50ba2bee-combined-ca-bundle\") pod \"swift-ring-rebalance-zbthx\" (UID: \"65fddfcc-6286-4146-8f8b-bb9e50ba2bee\") " pod="openstack/swift-ring-rebalance-zbthx" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.282055 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4l76\" (UniqueName: \"kubernetes.io/projected/de71f05a-e844-4d80-bd5b-2e4169a624c4-kube-api-access-c4l76\") pod \"swift-ring-rebalance-5js9g\" (UID: \"de71f05a-e844-4d80-bd5b-2e4169a624c4\") " pod="openstack/swift-ring-rebalance-5js9g" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.282073 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/de71f05a-e844-4d80-bd5b-2e4169a624c4-etc-swift\") pod \"swift-ring-rebalance-5js9g\" (UID: \"de71f05a-e844-4d80-bd5b-2e4169a624c4\") " pod="openstack/swift-ring-rebalance-5js9g" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.282112 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/65fddfcc-6286-4146-8f8b-bb9e50ba2bee-scripts\") pod \"swift-ring-rebalance-zbthx\" (UID: \"65fddfcc-6286-4146-8f8b-bb9e50ba2bee\") " pod="openstack/swift-ring-rebalance-zbthx" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.282133 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/de71f05a-e844-4d80-bd5b-2e4169a624c4-ring-data-devices\") pod \"swift-ring-rebalance-5js9g\" (UID: \"de71f05a-e844-4d80-bd5b-2e4169a624c4\") " pod="openstack/swift-ring-rebalance-5js9g" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.282178 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/65fddfcc-6286-4146-8f8b-bb9e50ba2bee-swiftconf\") pod \"swift-ring-rebalance-zbthx\" (UID: \"65fddfcc-6286-4146-8f8b-bb9e50ba2bee\") " pod="openstack/swift-ring-rebalance-zbthx" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.383197 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/65fddfcc-6286-4146-8f8b-bb9e50ba2bee-swiftconf\") pod \"swift-ring-rebalance-zbthx\" (UID: \"65fddfcc-6286-4146-8f8b-bb9e50ba2bee\") " pod="openstack/swift-ring-rebalance-zbthx" Dec 05 12:44:21 crc 
kubenswrapper[4784]: I1205 12:44:21.383449 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de71f05a-e844-4d80-bd5b-2e4169a624c4-combined-ca-bundle\") pod \"swift-ring-rebalance-5js9g\" (UID: \"de71f05a-e844-4d80-bd5b-2e4169a624c4\") " pod="openstack/swift-ring-rebalance-5js9g" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.383564 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/de71f05a-e844-4d80-bd5b-2e4169a624c4-dispersionconf\") pod \"swift-ring-rebalance-5js9g\" (UID: \"de71f05a-e844-4d80-bd5b-2e4169a624c4\") " pod="openstack/swift-ring-rebalance-5js9g" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.383639 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/de71f05a-e844-4d80-bd5b-2e4169a624c4-scripts\") pod \"swift-ring-rebalance-5js9g\" (UID: \"de71f05a-e844-4d80-bd5b-2e4169a624c4\") " pod="openstack/swift-ring-rebalance-5js9g" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.383746 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/65fddfcc-6286-4146-8f8b-bb9e50ba2bee-dispersionconf\") pod \"swift-ring-rebalance-zbthx\" (UID: \"65fddfcc-6286-4146-8f8b-bb9e50ba2bee\") " pod="openstack/swift-ring-rebalance-zbthx" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.383819 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/65fddfcc-6286-4146-8f8b-bb9e50ba2bee-ring-data-devices\") pod \"swift-ring-rebalance-zbthx\" (UID: \"65fddfcc-6286-4146-8f8b-bb9e50ba2bee\") " pod="openstack/swift-ring-rebalance-zbthx" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.383886 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/de71f05a-e844-4d80-bd5b-2e4169a624c4-swiftconf\") pod \"swift-ring-rebalance-5js9g\" (UID: \"de71f05a-e844-4d80-bd5b-2e4169a624c4\") " pod="openstack/swift-ring-rebalance-5js9g" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.383964 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q47fr\" (UniqueName: \"kubernetes.io/projected/65fddfcc-6286-4146-8f8b-bb9e50ba2bee-kube-api-access-q47fr\") pod \"swift-ring-rebalance-zbthx\" (UID: \"65fddfcc-6286-4146-8f8b-bb9e50ba2bee\") " pod="openstack/swift-ring-rebalance-zbthx" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.384038 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/65fddfcc-6286-4146-8f8b-bb9e50ba2bee-etc-swift\") pod \"swift-ring-rebalance-zbthx\" (UID: \"65fddfcc-6286-4146-8f8b-bb9e50ba2bee\") " pod="openstack/swift-ring-rebalance-zbthx" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.384124 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65fddfcc-6286-4146-8f8b-bb9e50ba2bee-combined-ca-bundle\") pod \"swift-ring-rebalance-zbthx\" (UID: \"65fddfcc-6286-4146-8f8b-bb9e50ba2bee\") " pod="openstack/swift-ring-rebalance-zbthx" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.384236 4784 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-c4l76\" (UniqueName: \"kubernetes.io/projected/de71f05a-e844-4d80-bd5b-2e4169a624c4-kube-api-access-c4l76\") pod \"swift-ring-rebalance-5js9g\" (UID: \"de71f05a-e844-4d80-bd5b-2e4169a624c4\") " pod="openstack/swift-ring-rebalance-5js9g" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.384319 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/de71f05a-e844-4d80-bd5b-2e4169a624c4-etc-swift\") pod \"swift-ring-rebalance-5js9g\" (UID: \"de71f05a-e844-4d80-bd5b-2e4169a624c4\") " pod="openstack/swift-ring-rebalance-5js9g" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.385067 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/65fddfcc-6286-4146-8f8b-bb9e50ba2bee-scripts\") pod \"swift-ring-rebalance-zbthx\" (UID: \"65fddfcc-6286-4146-8f8b-bb9e50ba2bee\") " pod="openstack/swift-ring-rebalance-zbthx" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.385214 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/de71f05a-e844-4d80-bd5b-2e4169a624c4-ring-data-devices\") pod \"swift-ring-rebalance-5js9g\" (UID: \"de71f05a-e844-4d80-bd5b-2e4169a624c4\") " pod="openstack/swift-ring-rebalance-5js9g" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.385287 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/de71f05a-e844-4d80-bd5b-2e4169a624c4-etc-swift\") pod \"swift-ring-rebalance-5js9g\" (UID: \"de71f05a-e844-4d80-bd5b-2e4169a624c4\") " pod="openstack/swift-ring-rebalance-5js9g" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.384774 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/65fddfcc-6286-4146-8f8b-bb9e50ba2bee-etc-swift\") pod \"swift-ring-rebalance-zbthx\" (UID: \"65fddfcc-6286-4146-8f8b-bb9e50ba2bee\") " pod="openstack/swift-ring-rebalance-zbthx" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.385323 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/65fddfcc-6286-4146-8f8b-bb9e50ba2bee-ring-data-devices\") pod \"swift-ring-rebalance-zbthx\" (UID: \"65fddfcc-6286-4146-8f8b-bb9e50ba2bee\") " pod="openstack/swift-ring-rebalance-zbthx" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.385724 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/de71f05a-e844-4d80-bd5b-2e4169a624c4-ring-data-devices\") pod \"swift-ring-rebalance-5js9g\" (UID: \"de71f05a-e844-4d80-bd5b-2e4169a624c4\") " pod="openstack/swift-ring-rebalance-5js9g" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.386065 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/65fddfcc-6286-4146-8f8b-bb9e50ba2bee-scripts\") pod \"swift-ring-rebalance-zbthx\" (UID: \"65fddfcc-6286-4146-8f8b-bb9e50ba2bee\") " pod="openstack/swift-ring-rebalance-zbthx" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.386672 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/de71f05a-e844-4d80-bd5b-2e4169a624c4-scripts\") pod 
\"swift-ring-rebalance-5js9g\" (UID: \"de71f05a-e844-4d80-bd5b-2e4169a624c4\") " pod="openstack/swift-ring-rebalance-5js9g" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.389342 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/de71f05a-e844-4d80-bd5b-2e4169a624c4-swiftconf\") pod \"swift-ring-rebalance-5js9g\" (UID: \"de71f05a-e844-4d80-bd5b-2e4169a624c4\") " pod="openstack/swift-ring-rebalance-5js9g" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.389357 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/65fddfcc-6286-4146-8f8b-bb9e50ba2bee-swiftconf\") pod \"swift-ring-rebalance-zbthx\" (UID: \"65fddfcc-6286-4146-8f8b-bb9e50ba2bee\") " pod="openstack/swift-ring-rebalance-zbthx" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.389495 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65fddfcc-6286-4146-8f8b-bb9e50ba2bee-combined-ca-bundle\") pod \"swift-ring-rebalance-zbthx\" (UID: \"65fddfcc-6286-4146-8f8b-bb9e50ba2bee\") " pod="openstack/swift-ring-rebalance-zbthx" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.391042 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/65fddfcc-6286-4146-8f8b-bb9e50ba2bee-dispersionconf\") pod \"swift-ring-rebalance-zbthx\" (UID: \"65fddfcc-6286-4146-8f8b-bb9e50ba2bee\") " pod="openstack/swift-ring-rebalance-zbthx" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.392349 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de71f05a-e844-4d80-bd5b-2e4169a624c4-combined-ca-bundle\") pod \"swift-ring-rebalance-5js9g\" (UID: \"de71f05a-e844-4d80-bd5b-2e4169a624c4\") " pod="openstack/swift-ring-rebalance-5js9g" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.398626 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/de71f05a-e844-4d80-bd5b-2e4169a624c4-dispersionconf\") pod \"swift-ring-rebalance-5js9g\" (UID: \"de71f05a-e844-4d80-bd5b-2e4169a624c4\") " pod="openstack/swift-ring-rebalance-5js9g" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.404326 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4l76\" (UniqueName: \"kubernetes.io/projected/de71f05a-e844-4d80-bd5b-2e4169a624c4-kube-api-access-c4l76\") pod \"swift-ring-rebalance-5js9g\" (UID: \"de71f05a-e844-4d80-bd5b-2e4169a624c4\") " pod="openstack/swift-ring-rebalance-5js9g" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.409092 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q47fr\" (UniqueName: \"kubernetes.io/projected/65fddfcc-6286-4146-8f8b-bb9e50ba2bee-kube-api-access-q47fr\") pod \"swift-ring-rebalance-zbthx\" (UID: \"65fddfcc-6286-4146-8f8b-bb9e50ba2bee\") " pod="openstack/swift-ring-rebalance-zbthx" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.489019 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-zbthx" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.502079 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-zbthx" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.530917 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-tj58g" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.539668 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-5js9g" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.587553 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/65fddfcc-6286-4146-8f8b-bb9e50ba2bee-etc-swift\") pod \"65fddfcc-6286-4146-8f8b-bb9e50ba2bee\" (UID: \"65fddfcc-6286-4146-8f8b-bb9e50ba2bee\") " Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.587798 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/65fddfcc-6286-4146-8f8b-bb9e50ba2bee-swiftconf\") pod \"65fddfcc-6286-4146-8f8b-bb9e50ba2bee\" (UID: \"65fddfcc-6286-4146-8f8b-bb9e50ba2bee\") " Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.588044 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q47fr\" (UniqueName: \"kubernetes.io/projected/65fddfcc-6286-4146-8f8b-bb9e50ba2bee-kube-api-access-q47fr\") pod \"65fddfcc-6286-4146-8f8b-bb9e50ba2bee\" (UID: \"65fddfcc-6286-4146-8f8b-bb9e50ba2bee\") " Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.588281 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/65fddfcc-6286-4146-8f8b-bb9e50ba2bee-scripts\") pod \"65fddfcc-6286-4146-8f8b-bb9e50ba2bee\" (UID: \"65fddfcc-6286-4146-8f8b-bb9e50ba2bee\") " Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.588474 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/65fddfcc-6286-4146-8f8b-bb9e50ba2bee-ring-data-devices\") pod \"65fddfcc-6286-4146-8f8b-bb9e50ba2bee\" (UID: \"65fddfcc-6286-4146-8f8b-bb9e50ba2bee\") " Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.588678 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/65fddfcc-6286-4146-8f8b-bb9e50ba2bee-dispersionconf\") pod \"65fddfcc-6286-4146-8f8b-bb9e50ba2bee\" (UID: \"65fddfcc-6286-4146-8f8b-bb9e50ba2bee\") " Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.588899 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65fddfcc-6286-4146-8f8b-bb9e50ba2bee-combined-ca-bundle\") pod \"65fddfcc-6286-4146-8f8b-bb9e50ba2bee\" (UID: \"65fddfcc-6286-4146-8f8b-bb9e50ba2bee\") " Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.589017 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/65fddfcc-6286-4146-8f8b-bb9e50ba2bee-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "65fddfcc-6286-4146-8f8b-bb9e50ba2bee" (UID: "65fddfcc-6286-4146-8f8b-bb9e50ba2bee"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.589599 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65fddfcc-6286-4146-8f8b-bb9e50ba2bee-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "65fddfcc-6286-4146-8f8b-bb9e50ba2bee" (UID: "65fddfcc-6286-4146-8f8b-bb9e50ba2bee"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.590147 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65fddfcc-6286-4146-8f8b-bb9e50ba2bee-scripts" (OuterVolumeSpecName: "scripts") pod "65fddfcc-6286-4146-8f8b-bb9e50ba2bee" (UID: "65fddfcc-6286-4146-8f8b-bb9e50ba2bee"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.595373 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65fddfcc-6286-4146-8f8b-bb9e50ba2bee-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "65fddfcc-6286-4146-8f8b-bb9e50ba2bee" (UID: "65fddfcc-6286-4146-8f8b-bb9e50ba2bee"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.596230 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65fddfcc-6286-4146-8f8b-bb9e50ba2bee-kube-api-access-q47fr" (OuterVolumeSpecName: "kube-api-access-q47fr") pod "65fddfcc-6286-4146-8f8b-bb9e50ba2bee" (UID: "65fddfcc-6286-4146-8f8b-bb9e50ba2bee"). InnerVolumeSpecName "kube-api-access-q47fr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.597171 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65fddfcc-6286-4146-8f8b-bb9e50ba2bee-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "65fddfcc-6286-4146-8f8b-bb9e50ba2bee" (UID: "65fddfcc-6286-4146-8f8b-bb9e50ba2bee"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.600499 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65fddfcc-6286-4146-8f8b-bb9e50ba2bee-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "65fddfcc-6286-4146-8f8b-bb9e50ba2bee" (UID: "65fddfcc-6286-4146-8f8b-bb9e50ba2bee"). InnerVolumeSpecName "dispersionconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.690821 4784 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/65fddfcc-6286-4146-8f8b-bb9e50ba2bee-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.691178 4784 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/65fddfcc-6286-4146-8f8b-bb9e50ba2bee-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.691214 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q47fr\" (UniqueName: \"kubernetes.io/projected/65fddfcc-6286-4146-8f8b-bb9e50ba2bee-kube-api-access-q47fr\") on node \"crc\" DevicePath \"\"" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.691231 4784 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/65fddfcc-6286-4146-8f8b-bb9e50ba2bee-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.691245 4784 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/65fddfcc-6286-4146-8f8b-bb9e50ba2bee-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.691260 4784 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/65fddfcc-6286-4146-8f8b-bb9e50ba2bee-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 05 12:44:21 crc kubenswrapper[4784]: I1205 12:44:21.691275 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65fddfcc-6286-4146-8f8b-bb9e50ba2bee-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:44:22 crc kubenswrapper[4784]: I1205 12:44:22.036784 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-5js9g"] Dec 05 12:44:22 crc kubenswrapper[4784]: I1205 12:44:22.196594 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Dec 05 12:44:22 crc kubenswrapper[4784]: I1205 12:44:22.196702 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Dec 05 12:44:22 crc kubenswrapper[4784]: W1205 12:44:22.291310 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podde71f05a_e844_4d80_bd5b_2e4169a624c4.slice/crio-192f3e3abe459825b751e5b3d896e975e94c48a69f3c411354dbd032d06a562d WatchSource:0}: Error finding container 192f3e3abe459825b751e5b3d896e975e94c48a69f3c411354dbd032d06a562d: Status 404 returned error can't find the container with id 192f3e3abe459825b751e5b3d896e975e94c48a69f3c411354dbd032d06a562d Dec 05 12:44:22 crc kubenswrapper[4784]: I1205 12:44:22.499756 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-5js9g" event={"ID":"de71f05a-e844-4d80-bd5b-2e4169a624c4","Type":"ContainerStarted","Data":"192f3e3abe459825b751e5b3d896e975e94c48a69f3c411354dbd032d06a562d"} Dec 05 12:44:22 crc kubenswrapper[4784]: I1205 12:44:22.499966 4784 util.go:30] "No sandbox for pod can be found. 
Dec 05 12:44:22 crc kubenswrapper[4784]: I1205 12:44:22.524282 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0"
Dec 05 12:44:22 crc kubenswrapper[4784]: I1205 12:44:22.563532 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-zbthx"]
Dec 05 12:44:22 crc kubenswrapper[4784]: I1205 12:44:22.574880 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-ring-rebalance-zbthx"]
Dec 05 12:44:22 crc kubenswrapper[4784]: I1205 12:44:22.652244 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0"
Dec 05 12:44:23 crc kubenswrapper[4784]: I1205 12:44:23.010665 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65fddfcc-6286-4146-8f8b-bb9e50ba2bee" path="/var/lib/kubelet/pods/65fddfcc-6286-4146-8f8b-bb9e50ba2bee/volumes"
Dec 05 12:44:23 crc kubenswrapper[4784]: I1205 12:44:23.833233 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-x99kz"]
Dec 05 12:44:23 crc kubenswrapper[4784]: I1205 12:44:23.834705 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-x99kz"
Dec 05 12:44:23 crc kubenswrapper[4784]: I1205 12:44:23.843649 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-01e4-account-create-update-x2qm9"]
Dec 05 12:44:23 crc kubenswrapper[4784]: I1205 12:44:23.845104 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-01e4-account-create-update-x2qm9"
Dec 05 12:44:23 crc kubenswrapper[4784]: I1205 12:44:23.846988 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret"
Dec 05 12:44:23 crc kubenswrapper[4784]: I1205 12:44:23.859226 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-x99kz"]
Dec 05 12:44:23 crc kubenswrapper[4784]: I1205 12:44:23.869527 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-01e4-account-create-update-x2qm9"]
Dec 05 12:44:23 crc kubenswrapper[4784]: I1205 12:44:23.930115 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xzmhr\" (UniqueName: \"kubernetes.io/projected/97d32e44-8c63-443a-b1b9-cc553a42c7dd-kube-api-access-xzmhr\") pod \"keystone-01e4-account-create-update-x2qm9\" (UID: \"97d32e44-8c63-443a-b1b9-cc553a42c7dd\") " pod="openstack/keystone-01e4-account-create-update-x2qm9"
Dec 05 12:44:23 crc kubenswrapper[4784]: I1205 12:44:23.930384 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b06854d3-4d35-46bb-b79f-ef5482b07eba-operator-scripts\") pod \"keystone-db-create-x99kz\" (UID: \"b06854d3-4d35-46bb-b79f-ef5482b07eba\") " pod="openstack/keystone-db-create-x99kz"
Dec 05 12:44:23 crc kubenswrapper[4784]: I1205 12:44:23.930566 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5pjth\" (UniqueName: \"kubernetes.io/projected/b06854d3-4d35-46bb-b79f-ef5482b07eba-kube-api-access-5pjth\") pod \"keystone-db-create-x99kz\" (UID: \"b06854d3-4d35-46bb-b79f-ef5482b07eba\") " pod="openstack/keystone-db-create-x99kz"
Dec 05 12:44:23 crc kubenswrapper[4784]: I1205 12:44:23.930628 4784 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/97d32e44-8c63-443a-b1b9-cc553a42c7dd-operator-scripts\") pod \"keystone-01e4-account-create-update-x2qm9\" (UID: \"97d32e44-8c63-443a-b1b9-cc553a42c7dd\") " pod="openstack/keystone-01e4-account-create-update-x2qm9" Dec 05 12:44:24 crc kubenswrapper[4784]: I1205 12:44:24.032095 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/97d32e44-8c63-443a-b1b9-cc553a42c7dd-operator-scripts\") pod \"keystone-01e4-account-create-update-x2qm9\" (UID: \"97d32e44-8c63-443a-b1b9-cc553a42c7dd\") " pod="openstack/keystone-01e4-account-create-update-x2qm9" Dec 05 12:44:24 crc kubenswrapper[4784]: I1205 12:44:24.032231 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xzmhr\" (UniqueName: \"kubernetes.io/projected/97d32e44-8c63-443a-b1b9-cc553a42c7dd-kube-api-access-xzmhr\") pod \"keystone-01e4-account-create-update-x2qm9\" (UID: \"97d32e44-8c63-443a-b1b9-cc553a42c7dd\") " pod="openstack/keystone-01e4-account-create-update-x2qm9" Dec 05 12:44:24 crc kubenswrapper[4784]: I1205 12:44:24.032274 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b06854d3-4d35-46bb-b79f-ef5482b07eba-operator-scripts\") pod \"keystone-db-create-x99kz\" (UID: \"b06854d3-4d35-46bb-b79f-ef5482b07eba\") " pod="openstack/keystone-db-create-x99kz" Dec 05 12:44:24 crc kubenswrapper[4784]: I1205 12:44:24.032366 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5pjth\" (UniqueName: \"kubernetes.io/projected/b06854d3-4d35-46bb-b79f-ef5482b07eba-kube-api-access-5pjth\") pod \"keystone-db-create-x99kz\" (UID: \"b06854d3-4d35-46bb-b79f-ef5482b07eba\") " pod="openstack/keystone-db-create-x99kz" Dec 05 12:44:24 crc kubenswrapper[4784]: I1205 12:44:24.033078 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/97d32e44-8c63-443a-b1b9-cc553a42c7dd-operator-scripts\") pod \"keystone-01e4-account-create-update-x2qm9\" (UID: \"97d32e44-8c63-443a-b1b9-cc553a42c7dd\") " pod="openstack/keystone-01e4-account-create-update-x2qm9" Dec 05 12:44:24 crc kubenswrapper[4784]: I1205 12:44:24.033353 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b06854d3-4d35-46bb-b79f-ef5482b07eba-operator-scripts\") pod \"keystone-db-create-x99kz\" (UID: \"b06854d3-4d35-46bb-b79f-ef5482b07eba\") " pod="openstack/keystone-db-create-x99kz" Dec 05 12:44:24 crc kubenswrapper[4784]: I1205 12:44:24.034783 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-b8km7"] Dec 05 12:44:24 crc kubenswrapper[4784]: I1205 12:44:24.035977 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-b8km7" Dec 05 12:44:24 crc kubenswrapper[4784]: I1205 12:44:24.046624 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-b8km7"] Dec 05 12:44:24 crc kubenswrapper[4784]: I1205 12:44:24.057540 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xzmhr\" (UniqueName: \"kubernetes.io/projected/97d32e44-8c63-443a-b1b9-cc553a42c7dd-kube-api-access-xzmhr\") pod \"keystone-01e4-account-create-update-x2qm9\" (UID: \"97d32e44-8c63-443a-b1b9-cc553a42c7dd\") " pod="openstack/keystone-01e4-account-create-update-x2qm9" Dec 05 12:44:24 crc kubenswrapper[4784]: I1205 12:44:24.072320 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5pjth\" (UniqueName: \"kubernetes.io/projected/b06854d3-4d35-46bb-b79f-ef5482b07eba-kube-api-access-5pjth\") pod \"keystone-db-create-x99kz\" (UID: \"b06854d3-4d35-46bb-b79f-ef5482b07eba\") " pod="openstack/keystone-db-create-x99kz" Dec 05 12:44:24 crc kubenswrapper[4784]: I1205 12:44:24.133606 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cfscb\" (UniqueName: \"kubernetes.io/projected/ff6438fd-45a7-4d2f-ae2c-a27a1a13ae83-kube-api-access-cfscb\") pod \"placement-db-create-b8km7\" (UID: \"ff6438fd-45a7-4d2f-ae2c-a27a1a13ae83\") " pod="openstack/placement-db-create-b8km7" Dec 05 12:44:24 crc kubenswrapper[4784]: I1205 12:44:24.133732 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ff6438fd-45a7-4d2f-ae2c-a27a1a13ae83-operator-scripts\") pod \"placement-db-create-b8km7\" (UID: \"ff6438fd-45a7-4d2f-ae2c-a27a1a13ae83\") " pod="openstack/placement-db-create-b8km7" Dec 05 12:44:24 crc kubenswrapper[4784]: I1205 12:44:24.137536 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-88ae-account-create-update-ht5wv"] Dec 05 12:44:24 crc kubenswrapper[4784]: I1205 12:44:24.138936 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-88ae-account-create-update-ht5wv" Dec 05 12:44:24 crc kubenswrapper[4784]: I1205 12:44:24.140804 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Dec 05 12:44:24 crc kubenswrapper[4784]: I1205 12:44:24.147985 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-88ae-account-create-update-ht5wv"] Dec 05 12:44:24 crc kubenswrapper[4784]: I1205 12:44:24.160369 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-x99kz" Dec 05 12:44:24 crc kubenswrapper[4784]: I1205 12:44:24.172559 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-01e4-account-create-update-x2qm9" Dec 05 12:44:24 crc kubenswrapper[4784]: I1205 12:44:24.236042 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e0c87cde-536a-436e-a823-50af03676501-operator-scripts\") pod \"placement-88ae-account-create-update-ht5wv\" (UID: \"e0c87cde-536a-436e-a823-50af03676501\") " pod="openstack/placement-88ae-account-create-update-ht5wv" Dec 05 12:44:24 crc kubenswrapper[4784]: I1205 12:44:24.236432 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zh8hb\" (UniqueName: \"kubernetes.io/projected/e0c87cde-536a-436e-a823-50af03676501-kube-api-access-zh8hb\") pod \"placement-88ae-account-create-update-ht5wv\" (UID: \"e0c87cde-536a-436e-a823-50af03676501\") " pod="openstack/placement-88ae-account-create-update-ht5wv" Dec 05 12:44:24 crc kubenswrapper[4784]: I1205 12:44:24.236575 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ff6438fd-45a7-4d2f-ae2c-a27a1a13ae83-operator-scripts\") pod \"placement-db-create-b8km7\" (UID: \"ff6438fd-45a7-4d2f-ae2c-a27a1a13ae83\") " pod="openstack/placement-db-create-b8km7" Dec 05 12:44:24 crc kubenswrapper[4784]: I1205 12:44:24.237287 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ff6438fd-45a7-4d2f-ae2c-a27a1a13ae83-operator-scripts\") pod \"placement-db-create-b8km7\" (UID: \"ff6438fd-45a7-4d2f-ae2c-a27a1a13ae83\") " pod="openstack/placement-db-create-b8km7" Dec 05 12:44:24 crc kubenswrapper[4784]: I1205 12:44:24.237738 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cfscb\" (UniqueName: \"kubernetes.io/projected/ff6438fd-45a7-4d2f-ae2c-a27a1a13ae83-kube-api-access-cfscb\") pod \"placement-db-create-b8km7\" (UID: \"ff6438fd-45a7-4d2f-ae2c-a27a1a13ae83\") " pod="openstack/placement-db-create-b8km7" Dec 05 12:44:24 crc kubenswrapper[4784]: I1205 12:44:24.253708 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cfscb\" (UniqueName: \"kubernetes.io/projected/ff6438fd-45a7-4d2f-ae2c-a27a1a13ae83-kube-api-access-cfscb\") pod \"placement-db-create-b8km7\" (UID: \"ff6438fd-45a7-4d2f-ae2c-a27a1a13ae83\") " pod="openstack/placement-db-create-b8km7" Dec 05 12:44:24 crc kubenswrapper[4784]: I1205 12:44:24.339542 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e0c87cde-536a-436e-a823-50af03676501-operator-scripts\") pod \"placement-88ae-account-create-update-ht5wv\" (UID: \"e0c87cde-536a-436e-a823-50af03676501\") " pod="openstack/placement-88ae-account-create-update-ht5wv" Dec 05 12:44:24 crc kubenswrapper[4784]: I1205 12:44:24.339657 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zh8hb\" (UniqueName: \"kubernetes.io/projected/e0c87cde-536a-436e-a823-50af03676501-kube-api-access-zh8hb\") pod \"placement-88ae-account-create-update-ht5wv\" (UID: \"e0c87cde-536a-436e-a823-50af03676501\") " pod="openstack/placement-88ae-account-create-update-ht5wv" Dec 05 12:44:24 crc kubenswrapper[4784]: I1205 12:44:24.340857 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/e0c87cde-536a-436e-a823-50af03676501-operator-scripts\") pod \"placement-88ae-account-create-update-ht5wv\" (UID: \"e0c87cde-536a-436e-a823-50af03676501\") " pod="openstack/placement-88ae-account-create-update-ht5wv" Dec 05 12:44:24 crc kubenswrapper[4784]: I1205 12:44:24.353115 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-b8km7" Dec 05 12:44:24 crc kubenswrapper[4784]: I1205 12:44:24.358757 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zh8hb\" (UniqueName: \"kubernetes.io/projected/e0c87cde-536a-436e-a823-50af03676501-kube-api-access-zh8hb\") pod \"placement-88ae-account-create-update-ht5wv\" (UID: \"e0c87cde-536a-436e-a823-50af03676501\") " pod="openstack/placement-88ae-account-create-update-ht5wv" Dec 05 12:44:24 crc kubenswrapper[4784]: I1205 12:44:24.458487 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-88ae-account-create-update-ht5wv" Dec 05 12:44:24 crc kubenswrapper[4784]: I1205 12:44:24.522874 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"80b02221-f73a-4535-b2d9-c203e5de2061","Type":"ContainerStarted","Data":"91e146b11b7a754be41abfd3596f21157f44dae68d79af3757fc671008669720"} Dec 05 12:44:25 crc kubenswrapper[4784]: I1205 12:44:25.050199 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f34e93a8-02d9-44ef-a18e-13ce24c3f9a6-etc-swift\") pod \"swift-storage-0\" (UID: \"f34e93a8-02d9-44ef-a18e-13ce24c3f9a6\") " pod="openstack/swift-storage-0" Dec 05 12:44:25 crc kubenswrapper[4784]: E1205 12:44:25.050418 4784 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 12:44:25 crc kubenswrapper[4784]: E1205 12:44:25.050667 4784 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 12:44:25 crc kubenswrapper[4784]: E1205 12:44:25.050728 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/f34e93a8-02d9-44ef-a18e-13ce24c3f9a6-etc-swift podName:f34e93a8-02d9-44ef-a18e-13ce24c3f9a6 nodeName:}" failed. No retries permitted until 2025-12-05 12:44:33.050711885 +0000 UTC m=+1152.470778690 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/f34e93a8-02d9-44ef-a18e-13ce24c3f9a6-etc-swift") pod "swift-storage-0" (UID: "f34e93a8-02d9-44ef-a18e-13ce24c3f9a6") : configmap "swift-ring-files" not found Dec 05 12:44:26 crc kubenswrapper[4784]: I1205 12:44:26.094851 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-db-create-xwm49"] Dec 05 12:44:26 crc kubenswrapper[4784]: I1205 12:44:26.098213 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-db-create-xwm49" Dec 05 12:44:26 crc kubenswrapper[4784]: I1205 12:44:26.103934 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-db-create-xwm49"] Dec 05 12:44:26 crc kubenswrapper[4784]: I1205 12:44:26.188010 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Dec 05 12:44:26 crc kubenswrapper[4784]: I1205 12:44:26.200660 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-9959-account-create-update-5k2wn"] Dec 05 12:44:26 crc kubenswrapper[4784]: I1205 12:44:26.201827 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-9959-account-create-update-5k2wn" Dec 05 12:44:26 crc kubenswrapper[4784]: I1205 12:44:26.203631 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-db-secret" Dec 05 12:44:26 crc kubenswrapper[4784]: I1205 12:44:26.212282 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-9959-account-create-update-5k2wn"] Dec 05 12:44:26 crc kubenswrapper[4784]: I1205 12:44:26.268240 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f3f23d2c-8577-46a7-90f4-37e12278b111-operator-scripts\") pod \"watcher-db-create-xwm49\" (UID: \"f3f23d2c-8577-46a7-90f4-37e12278b111\") " pod="openstack/watcher-db-create-xwm49" Dec 05 12:44:26 crc kubenswrapper[4784]: I1205 12:44:26.268288 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q92s2\" (UniqueName: \"kubernetes.io/projected/f3f23d2c-8577-46a7-90f4-37e12278b111-kube-api-access-q92s2\") pod \"watcher-db-create-xwm49\" (UID: \"f3f23d2c-8577-46a7-90f4-37e12278b111\") " pod="openstack/watcher-db-create-xwm49" Dec 05 12:44:26 crc kubenswrapper[4784]: I1205 12:44:26.369422 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f3f23d2c-8577-46a7-90f4-37e12278b111-operator-scripts\") pod \"watcher-db-create-xwm49\" (UID: \"f3f23d2c-8577-46a7-90f4-37e12278b111\") " pod="openstack/watcher-db-create-xwm49" Dec 05 12:44:26 crc kubenswrapper[4784]: I1205 12:44:26.369498 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q92s2\" (UniqueName: \"kubernetes.io/projected/f3f23d2c-8577-46a7-90f4-37e12278b111-kube-api-access-q92s2\") pod \"watcher-db-create-xwm49\" (UID: \"f3f23d2c-8577-46a7-90f4-37e12278b111\") " pod="openstack/watcher-db-create-xwm49" Dec 05 12:44:26 crc kubenswrapper[4784]: I1205 12:44:26.369562 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a6099b41-be7d-4790-9f8c-3581d99ce48e-operator-scripts\") pod \"watcher-9959-account-create-update-5k2wn\" (UID: \"a6099b41-be7d-4790-9f8c-3581d99ce48e\") " pod="openstack/watcher-9959-account-create-update-5k2wn" Dec 05 12:44:26 crc kubenswrapper[4784]: I1205 12:44:26.369652 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n7fm2\" (UniqueName: \"kubernetes.io/projected/a6099b41-be7d-4790-9f8c-3581d99ce48e-kube-api-access-n7fm2\") pod \"watcher-9959-account-create-update-5k2wn\" (UID: \"a6099b41-be7d-4790-9f8c-3581d99ce48e\") " 
pod="openstack/watcher-9959-account-create-update-5k2wn" Dec 05 12:44:26 crc kubenswrapper[4784]: I1205 12:44:26.370280 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f3f23d2c-8577-46a7-90f4-37e12278b111-operator-scripts\") pod \"watcher-db-create-xwm49\" (UID: \"f3f23d2c-8577-46a7-90f4-37e12278b111\") " pod="openstack/watcher-db-create-xwm49" Dec 05 12:44:26 crc kubenswrapper[4784]: I1205 12:44:26.383368 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5dd44fd6df-gsqs9" Dec 05 12:44:26 crc kubenswrapper[4784]: I1205 12:44:26.397336 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q92s2\" (UniqueName: \"kubernetes.io/projected/f3f23d2c-8577-46a7-90f4-37e12278b111-kube-api-access-q92s2\") pod \"watcher-db-create-xwm49\" (UID: \"f3f23d2c-8577-46a7-90f4-37e12278b111\") " pod="openstack/watcher-db-create-xwm49" Dec 05 12:44:26 crc kubenswrapper[4784]: I1205 12:44:26.427315 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-create-xwm49" Dec 05 12:44:26 crc kubenswrapper[4784]: I1205 12:44:26.470940 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a6099b41-be7d-4790-9f8c-3581d99ce48e-operator-scripts\") pod \"watcher-9959-account-create-update-5k2wn\" (UID: \"a6099b41-be7d-4790-9f8c-3581d99ce48e\") " pod="openstack/watcher-9959-account-create-update-5k2wn" Dec 05 12:44:26 crc kubenswrapper[4784]: I1205 12:44:26.471045 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n7fm2\" (UniqueName: \"kubernetes.io/projected/a6099b41-be7d-4790-9f8c-3581d99ce48e-kube-api-access-n7fm2\") pod \"watcher-9959-account-create-update-5k2wn\" (UID: \"a6099b41-be7d-4790-9f8c-3581d99ce48e\") " pod="openstack/watcher-9959-account-create-update-5k2wn" Dec 05 12:44:26 crc kubenswrapper[4784]: I1205 12:44:26.471067 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-ccfb859df-n5vsw"] Dec 05 12:44:26 crc kubenswrapper[4784]: I1205 12:44:26.471741 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-ccfb859df-n5vsw" podUID="8c6c365b-59eb-4668-9f15-42dcc18f87bd" containerName="dnsmasq-dns" containerID="cri-o://9d945cbd6f7ac6721b49c9ffdb5bd37281fdf713d0ce95a58009465c2e44b866" gracePeriod=10 Dec 05 12:44:26 crc kubenswrapper[4784]: I1205 12:44:26.472130 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a6099b41-be7d-4790-9f8c-3581d99ce48e-operator-scripts\") pod \"watcher-9959-account-create-update-5k2wn\" (UID: \"a6099b41-be7d-4790-9f8c-3581d99ce48e\") " pod="openstack/watcher-9959-account-create-update-5k2wn" Dec 05 12:44:26 crc kubenswrapper[4784]: I1205 12:44:26.490382 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n7fm2\" (UniqueName: \"kubernetes.io/projected/a6099b41-be7d-4790-9f8c-3581d99ce48e-kube-api-access-n7fm2\") pod \"watcher-9959-account-create-update-5k2wn\" (UID: \"a6099b41-be7d-4790-9f8c-3581d99ce48e\") " pod="openstack/watcher-9959-account-create-update-5k2wn" Dec 05 12:44:26 crc kubenswrapper[4784]: I1205 12:44:26.520524 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-9959-account-create-update-5k2wn" Dec 05 12:44:27 crc kubenswrapper[4784]: I1205 12:44:27.553880 4784 generic.go:334] "Generic (PLEG): container finished" podID="8c6c365b-59eb-4668-9f15-42dcc18f87bd" containerID="9d945cbd6f7ac6721b49c9ffdb5bd37281fdf713d0ce95a58009465c2e44b866" exitCode=0 Dec 05 12:44:27 crc kubenswrapper[4784]: I1205 12:44:27.553973 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-ccfb859df-n5vsw" event={"ID":"8c6c365b-59eb-4668-9f15-42dcc18f87bd","Type":"ContainerDied","Data":"9d945cbd6f7ac6721b49c9ffdb5bd37281fdf713d0ce95a58009465c2e44b866"} Dec 05 12:44:29 crc kubenswrapper[4784]: I1205 12:44:29.572911 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:44:29 crc kubenswrapper[4784]: I1205 12:44:29.573411 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:44:30 crc kubenswrapper[4784]: I1205 12:44:30.856114 4784 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-ccfb859df-n5vsw" podUID="8c6c365b-59eb-4668-9f15-42dcc18f87bd" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.116:5353: connect: connection refused" Dec 05 12:44:33 crc kubenswrapper[4784]: I1205 12:44:33.099242 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f34e93a8-02d9-44ef-a18e-13ce24c3f9a6-etc-swift\") pod \"swift-storage-0\" (UID: \"f34e93a8-02d9-44ef-a18e-13ce24c3f9a6\") " pod="openstack/swift-storage-0" Dec 05 12:44:33 crc kubenswrapper[4784]: E1205 12:44:33.099428 4784 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 12:44:33 crc kubenswrapper[4784]: E1205 12:44:33.100239 4784 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 12:44:33 crc kubenswrapper[4784]: E1205 12:44:33.100297 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/f34e93a8-02d9-44ef-a18e-13ce24c3f9a6-etc-swift podName:f34e93a8-02d9-44ef-a18e-13ce24c3f9a6 nodeName:}" failed. No retries permitted until 2025-12-05 12:44:49.100278968 +0000 UTC m=+1168.520345793 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/f34e93a8-02d9-44ef-a18e-13ce24c3f9a6-etc-swift") pod "swift-storage-0" (UID: "f34e93a8-02d9-44ef-a18e-13ce24c3f9a6") : configmap "swift-ring-files" not found Dec 05 12:44:35 crc kubenswrapper[4784]: I1205 12:44:35.154897 4784 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-ljc2l" podUID="938888bc-6cef-410e-b517-9fdb0c824405" containerName="ovn-controller" probeResult="failure" output=< Dec 05 12:44:35 crc kubenswrapper[4784]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 05 12:44:35 crc kubenswrapper[4784]: > Dec 05 12:44:35 crc kubenswrapper[4784]: I1205 12:44:35.170917 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-6s6n5" Dec 05 12:44:35 crc kubenswrapper[4784]: E1205 12:44:35.351374 4784 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.151:5001/podified-master-centos10/openstack-swift-proxy-server:watcher_latest" Dec 05 12:44:35 crc kubenswrapper[4784]: E1205 12:44:35.351458 4784 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.151:5001/podified-master-centos10/openstack-swift-proxy-server:watcher_latest" Dec 05 12:44:35 crc kubenswrapper[4784]: E1205 12:44:35.351713 4784 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:swift-ring-rebalance,Image:38.102.83.151:5001/podified-master-centos10/openstack-swift-proxy-server:watcher_latest,Command:[/usr/local/bin/swift-ring-tool all],Args:[],WorkingDir:/etc/swift,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CM_NAME,Value:swift-ring-files,ValueFrom:nil,},EnvVar{Name:NAMESPACE,Value:openstack,ValueFrom:nil,},EnvVar{Name:OWNER_APIVERSION,Value:swift.openstack.org/v1beta1,ValueFrom:nil,},EnvVar{Name:OWNER_KIND,Value:SwiftRing,ValueFrom:nil,},EnvVar{Name:OWNER_NAME,Value:swift-ring,ValueFrom:nil,},EnvVar{Name:OWNER_UID,Value:6ae8d5e3-3e55-44d2-b58b-487be9773349,ValueFrom:nil,},EnvVar{Name:SWIFT_MIN_PART_HOURS,Value:1,ValueFrom:nil,},EnvVar{Name:SWIFT_PART_POWER,Value:10,ValueFrom:nil,},EnvVar{Name:SWIFT_REPLICAS,Value:1,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/swift-ring-tool,SubPath:swift-ring-tool,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:swiftconf,ReadOnly:true,MountPath:/etc/swift/swift.conf,SubPath:swift.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:etc-swift,ReadOnly:false,MountPath:/etc/swift,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ring-data-devices,ReadOnly:true,MountPath:/var/lib/config-data/ring-devices,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dispersionconf,ReadOnly:true,MountPath:/etc/swift/dispersion.conf,SubPath:dispersion.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-c4l76,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceac
count,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42445,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-ring-rebalance-5js9g_openstack(de71f05a-e844-4d80-bd5b-2e4169a624c4): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 12:44:35 crc kubenswrapper[4784]: E1205 12:44:35.353518 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"swift-ring-rebalance\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/swift-ring-rebalance-5js9g" podUID="de71f05a-e844-4d80-bd5b-2e4169a624c4" Dec 05 12:44:35 crc kubenswrapper[4784]: E1205 12:44:35.642904 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"swift-ring-rebalance\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.151:5001/podified-master-centos10/openstack-swift-proxy-server:watcher_latest\\\"\"" pod="openstack/swift-ring-rebalance-5js9g" podUID="de71f05a-e844-4d80-bd5b-2e4169a624c4" Dec 05 12:44:35 crc kubenswrapper[4784]: I1205 12:44:35.922263 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-01e4-account-create-update-x2qm9"] Dec 05 12:44:36 crc kubenswrapper[4784]: E1205 12:44:36.751043 4784 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/thanos-rhel9@sha256:d972f4faa5e9c121402d23ed85002f26af48ec36b1b71a7489d677b3913d08b4" Dec 05 12:44:36 crc kubenswrapper[4784]: E1205 12:44:36.751217 4784 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:thanos-sidecar,Image:registry.redhat.io/cluster-observability-operator/thanos-rhel9@sha256:d972f4faa5e9c121402d23ed85002f26af48ec36b1b71a7489d677b3913d08b4,Command:[],Args:[sidecar --prometheus.url=http://localhost:9090/ --grpc-address=:10901 --http-address=:10902 --log.level=info 
Dec 05 12:44:36 crc kubenswrapper[4784]: E1205 12:44:36.752424 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"thanos-sidecar\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openstack/prometheus-metric-storage-0" podUID="80b02221-f73a-4535-b2d9-c203e5de2061"
Dec 05 12:44:36 crc kubenswrapper[4784]: W1205 12:44:36.752879 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod97d32e44_8c63_443a_b1b9_cc553a42c7dd.slice/crio-6bdf45cdc4e9054487ddb4134eefbf3c2c32706373a7551960c5f1edd24eb665 WatchSource:0}: Error finding container 6bdf45cdc4e9054487ddb4134eefbf3c2c32706373a7551960c5f1edd24eb665: Status 404 returned error can't find the container with id 6bdf45cdc4e9054487ddb4134eefbf3c2c32706373a7551960c5f1edd24eb665
Dec 05 12:44:36 crc kubenswrapper[4784]: I1205 12:44:36.979644 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-ccfb859df-n5vsw"
Dec 05 12:44:37 crc kubenswrapper[4784]: I1205 12:44:37.077913 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c6c365b-59eb-4668-9f15-42dcc18f87bd-config\") pod \"8c6c365b-59eb-4668-9f15-42dcc18f87bd\" (UID: \"8c6c365b-59eb-4668-9f15-42dcc18f87bd\") "
Dec 05 12:44:37 crc kubenswrapper[4784]: I1205 12:44:37.077982 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2vt77\" (UniqueName: \"kubernetes.io/projected/8c6c365b-59eb-4668-9f15-42dcc18f87bd-kube-api-access-2vt77\") pod \"8c6c365b-59eb-4668-9f15-42dcc18f87bd\" (UID: \"8c6c365b-59eb-4668-9f15-42dcc18f87bd\") "
Dec 05 12:44:37 crc kubenswrapper[4784]: I1205 12:44:37.078012 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8c6c365b-59eb-4668-9f15-42dcc18f87bd-dns-svc\") pod \"8c6c365b-59eb-4668-9f15-42dcc18f87bd\" (UID: \"8c6c365b-59eb-4668-9f15-42dcc18f87bd\") "
Dec 05 12:44:37 crc kubenswrapper[4784]: I1205 12:44:37.078137 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8c6c365b-59eb-4668-9f15-42dcc18f87bd-ovsdbserver-nb\") pod \"8c6c365b-59eb-4668-9f15-42dcc18f87bd\" (UID: \"8c6c365b-59eb-4668-9f15-42dcc18f87bd\") "
Dec 05 12:44:37 crc kubenswrapper[4784]: I1205 12:44:37.084354 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c6c365b-59eb-4668-9f15-42dcc18f87bd-kube-api-access-2vt77" (OuterVolumeSpecName: "kube-api-access-2vt77") pod "8c6c365b-59eb-4668-9f15-42dcc18f87bd" (UID: "8c6c365b-59eb-4668-9f15-42dcc18f87bd"). InnerVolumeSpecName "kube-api-access-2vt77". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:44:37 crc kubenswrapper[4784]: I1205 12:44:37.127426 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c6c365b-59eb-4668-9f15-42dcc18f87bd-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8c6c365b-59eb-4668-9f15-42dcc18f87bd" (UID: "8c6c365b-59eb-4668-9f15-42dcc18f87bd"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:44:37 crc kubenswrapper[4784]: I1205 12:44:37.129146 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c6c365b-59eb-4668-9f15-42dcc18f87bd-config" (OuterVolumeSpecName: "config") pod "8c6c365b-59eb-4668-9f15-42dcc18f87bd" (UID: "8c6c365b-59eb-4668-9f15-42dcc18f87bd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:44:37 crc kubenswrapper[4784]: I1205 12:44:37.158933 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c6c365b-59eb-4668-9f15-42dcc18f87bd-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8c6c365b-59eb-4668-9f15-42dcc18f87bd" (UID: "8c6c365b-59eb-4668-9f15-42dcc18f87bd"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:44:37 crc kubenswrapper[4784]: I1205 12:44:37.183372 4784 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c6c365b-59eb-4668-9f15-42dcc18f87bd-config\") on node \"crc\" DevicePath \"\""
Dec 05 12:44:37 crc kubenswrapper[4784]: I1205 12:44:37.183408 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2vt77\" (UniqueName: \"kubernetes.io/projected/8c6c365b-59eb-4668-9f15-42dcc18f87bd-kube-api-access-2vt77\") on node \"crc\" DevicePath \"\""
Dec 05 12:44:37 crc kubenswrapper[4784]: I1205 12:44:37.183419 4784 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8c6c365b-59eb-4668-9f15-42dcc18f87bd-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 05 12:44:37 crc kubenswrapper[4784]: I1205 12:44:37.183428 4784 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8c6c365b-59eb-4668-9f15-42dcc18f87bd-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 05 12:44:37 crc kubenswrapper[4784]: I1205 12:44:37.258803 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-b8km7"]
Dec 05 12:44:37 crc kubenswrapper[4784]: W1205 12:44:37.265215 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podff6438fd_45a7_4d2f_ae2c_a27a1a13ae83.slice/crio-63713bd9c251dbe7b758a2efcf220c06e56efdcf52bd313ee5458fe997a33a91 WatchSource:0}: Error finding container 63713bd9c251dbe7b758a2efcf220c06e56efdcf52bd313ee5458fe997a33a91: Status 404 returned error can't find the container with id 63713bd9c251dbe7b758a2efcf220c06e56efdcf52bd313ee5458fe997a33a91
Dec 05 12:44:37 crc kubenswrapper[4784]: I1205 12:44:37.342901 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-x99kz"]
Dec 05 12:44:37 crc kubenswrapper[4784]: W1205 12:44:37.359201 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb06854d3_4d35_46bb_b79f_ef5482b07eba.slice/crio-6926e3eaa98f304d99fd04d1d8b7a4b794827593741e7a086f52646edfae51d7 WatchSource:0}: Error finding container 6926e3eaa98f304d99fd04d1d8b7a4b794827593741e7a086f52646edfae51d7: Status 404 returned error can't find the container with id 6926e3eaa98f304d99fd04d1d8b7a4b794827593741e7a086f52646edfae51d7
Dec 05 12:44:37 crc kubenswrapper[4784]: I1205 12:44:37.438059 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-9959-account-create-update-5k2wn"]
Dec 05 12:44:37 crc kubenswrapper[4784]: W1205 12:44:37.443511 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf3f23d2c_8577_46a7_90f4_37e12278b111.slice/crio-29a38753181fe3112f6f2f450626a0111a805dbe6a04982e92338b9ac8130eeb WatchSource:0}: Error finding container 29a38753181fe3112f6f2f450626a0111a805dbe6a04982e92338b9ac8130eeb: Status 404 returned error can't find the container with id 29a38753181fe3112f6f2f450626a0111a805dbe6a04982e92338b9ac8130eeb
Dec 05 12:44:37 crc kubenswrapper[4784]: I1205 12:44:37.461150 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-db-create-xwm49"]
Dec 05 12:44:37 crc kubenswrapper[4784]: I1205 12:44:37.475761 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-88ae-account-create-update-ht5wv"]
pods=["openstack/placement-88ae-account-create-update-ht5wv"] Dec 05 12:44:37 crc kubenswrapper[4784]: I1205 12:44:37.657913 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-01e4-account-create-update-x2qm9" event={"ID":"97d32e44-8c63-443a-b1b9-cc553a42c7dd","Type":"ContainerStarted","Data":"6bdf45cdc4e9054487ddb4134eefbf3c2c32706373a7551960c5f1edd24eb665"} Dec 05 12:44:37 crc kubenswrapper[4784]: I1205 12:44:37.659354 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-create-xwm49" event={"ID":"f3f23d2c-8577-46a7-90f4-37e12278b111","Type":"ContainerStarted","Data":"29a38753181fe3112f6f2f450626a0111a805dbe6a04982e92338b9ac8130eeb"} Dec 05 12:44:37 crc kubenswrapper[4784]: I1205 12:44:37.661360 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-ccfb859df-n5vsw" event={"ID":"8c6c365b-59eb-4668-9f15-42dcc18f87bd","Type":"ContainerDied","Data":"5b1fc0c7e66968a6d3e2edfe86efdcec1d4a57eb74ac874820731292deef4eaa"} Dec 05 12:44:37 crc kubenswrapper[4784]: I1205 12:44:37.661413 4784 scope.go:117] "RemoveContainer" containerID="9d945cbd6f7ac6721b49c9ffdb5bd37281fdf713d0ce95a58009465c2e44b866" Dec 05 12:44:37 crc kubenswrapper[4784]: I1205 12:44:37.661587 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-ccfb859df-n5vsw" Dec 05 12:44:37 crc kubenswrapper[4784]: I1205 12:44:37.663784 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-88ae-account-create-update-ht5wv" event={"ID":"e0c87cde-536a-436e-a823-50af03676501","Type":"ContainerStarted","Data":"e5418a9c9926bb0929d109485c7a9179f8433afc966cf5ca67eef515159fcd1a"} Dec 05 12:44:37 crc kubenswrapper[4784]: I1205 12:44:37.664844 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-9959-account-create-update-5k2wn" event={"ID":"a6099b41-be7d-4790-9f8c-3581d99ce48e","Type":"ContainerStarted","Data":"7d5aba3afd075a3cd64eb47d6a330f95b2accf1302c92a0cd181aba5e0a0da65"} Dec 05 12:44:37 crc kubenswrapper[4784]: I1205 12:44:37.665920 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-b8km7" event={"ID":"ff6438fd-45a7-4d2f-ae2c-a27a1a13ae83","Type":"ContainerStarted","Data":"63713bd9c251dbe7b758a2efcf220c06e56efdcf52bd313ee5458fe997a33a91"} Dec 05 12:44:37 crc kubenswrapper[4784]: I1205 12:44:37.666761 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-x99kz" event={"ID":"b06854d3-4d35-46bb-b79f-ef5482b07eba","Type":"ContainerStarted","Data":"6926e3eaa98f304d99fd04d1d8b7a4b794827593741e7a086f52646edfae51d7"} Dec 05 12:44:37 crc kubenswrapper[4784]: E1205 12:44:37.667967 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"thanos-sidecar\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/thanos-rhel9@sha256:d972f4faa5e9c121402d23ed85002f26af48ec36b1b71a7489d677b3913d08b4\\\"\"" pod="openstack/prometheus-metric-storage-0" podUID="80b02221-f73a-4535-b2d9-c203e5de2061" Dec 05 12:44:37 crc kubenswrapper[4784]: I1205 12:44:37.680520 4784 scope.go:117] "RemoveContainer" containerID="6c6476d6148bbf208b1f8335c3d8cb99674e2dba9c2a010d70f166e428641aae" Dec 05 12:44:37 crc kubenswrapper[4784]: I1205 12:44:37.714402 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-ccfb859df-n5vsw"] Dec 05 12:44:37 crc kubenswrapper[4784]: I1205 12:44:37.720219 4784 kubelet.go:2431] "SyncLoop 
REMOVE" source="api" pods=["openstack/dnsmasq-dns-ccfb859df-n5vsw"] Dec 05 12:44:38 crc kubenswrapper[4784]: I1205 12:44:38.678318 4784 generic.go:334] "Generic (PLEG): container finished" podID="97d32e44-8c63-443a-b1b9-cc553a42c7dd" containerID="dffa1009a27a348d4f855d55ba2b22811736eb53fab9b217ed86b0afe6194869" exitCode=0 Dec 05 12:44:38 crc kubenswrapper[4784]: I1205 12:44:38.678392 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-01e4-account-create-update-x2qm9" event={"ID":"97d32e44-8c63-443a-b1b9-cc553a42c7dd","Type":"ContainerDied","Data":"dffa1009a27a348d4f855d55ba2b22811736eb53fab9b217ed86b0afe6194869"} Dec 05 12:44:38 crc kubenswrapper[4784]: I1205 12:44:38.680666 4784 generic.go:334] "Generic (PLEG): container finished" podID="f3f23d2c-8577-46a7-90f4-37e12278b111" containerID="12601a7ddeed319eda128ab845fbde09254b66bb0dc49de473fdfa5dd463f26c" exitCode=0 Dec 05 12:44:38 crc kubenswrapper[4784]: I1205 12:44:38.680743 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-create-xwm49" event={"ID":"f3f23d2c-8577-46a7-90f4-37e12278b111","Type":"ContainerDied","Data":"12601a7ddeed319eda128ab845fbde09254b66bb0dc49de473fdfa5dd463f26c"} Dec 05 12:44:38 crc kubenswrapper[4784]: I1205 12:44:38.684707 4784 generic.go:334] "Generic (PLEG): container finished" podID="e0c87cde-536a-436e-a823-50af03676501" containerID="594e5d2bca0815304adce02e83f13aa60ca80f4f338b64667fbe90e9279dd576" exitCode=0 Dec 05 12:44:38 crc kubenswrapper[4784]: I1205 12:44:38.684761 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-88ae-account-create-update-ht5wv" event={"ID":"e0c87cde-536a-436e-a823-50af03676501","Type":"ContainerDied","Data":"594e5d2bca0815304adce02e83f13aa60ca80f4f338b64667fbe90e9279dd576"} Dec 05 12:44:38 crc kubenswrapper[4784]: I1205 12:44:38.686387 4784 generic.go:334] "Generic (PLEG): container finished" podID="a6099b41-be7d-4790-9f8c-3581d99ce48e" containerID="10ca6a2bd934927607a83a8a991544af85cc05b07f5a305e7c9955b56c97fa3a" exitCode=0 Dec 05 12:44:38 crc kubenswrapper[4784]: I1205 12:44:38.686455 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-9959-account-create-update-5k2wn" event={"ID":"a6099b41-be7d-4790-9f8c-3581d99ce48e","Type":"ContainerDied","Data":"10ca6a2bd934927607a83a8a991544af85cc05b07f5a305e7c9955b56c97fa3a"} Dec 05 12:44:38 crc kubenswrapper[4784]: I1205 12:44:38.688608 4784 generic.go:334] "Generic (PLEG): container finished" podID="ff6438fd-45a7-4d2f-ae2c-a27a1a13ae83" containerID="887fa74fe35e37d939f6e4d6202ed121b49fcf8e3d853e51cd05511c108f7e56" exitCode=0 Dec 05 12:44:38 crc kubenswrapper[4784]: I1205 12:44:38.688671 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-b8km7" event={"ID":"ff6438fd-45a7-4d2f-ae2c-a27a1a13ae83","Type":"ContainerDied","Data":"887fa74fe35e37d939f6e4d6202ed121b49fcf8e3d853e51cd05511c108f7e56"} Dec 05 12:44:38 crc kubenswrapper[4784]: I1205 12:44:38.691274 4784 generic.go:334] "Generic (PLEG): container finished" podID="b06854d3-4d35-46bb-b79f-ef5482b07eba" containerID="cfc9985e60fdb8705e5e2a3e48dde0147801fc5f9f5fd0efc8f20e441fe4218a" exitCode=0 Dec 05 12:44:38 crc kubenswrapper[4784]: I1205 12:44:38.691310 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-x99kz" event={"ID":"b06854d3-4d35-46bb-b79f-ef5482b07eba","Type":"ContainerDied","Data":"cfc9985e60fdb8705e5e2a3e48dde0147801fc5f9f5fd0efc8f20e441fe4218a"} Dec 05 12:44:39 crc kubenswrapper[4784]: I1205 
12:44:39.012899 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c6c365b-59eb-4668-9f15-42dcc18f87bd" path="/var/lib/kubelet/pods/8c6c365b-59eb-4668-9f15-42dcc18f87bd/volumes" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.156479 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-88ae-account-create-update-ht5wv" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.167639 4784 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-ljc2l" podUID="938888bc-6cef-410e-b517-9fdb0c824405" containerName="ovn-controller" probeResult="failure" output=< Dec 05 12:44:40 crc kubenswrapper[4784]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 05 12:44:40 crc kubenswrapper[4784]: > Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.185253 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-6s6n5" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.251048 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zh8hb\" (UniqueName: \"kubernetes.io/projected/e0c87cde-536a-436e-a823-50af03676501-kube-api-access-zh8hb\") pod \"e0c87cde-536a-436e-a823-50af03676501\" (UID: \"e0c87cde-536a-436e-a823-50af03676501\") " Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.251142 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e0c87cde-536a-436e-a823-50af03676501-operator-scripts\") pod \"e0c87cde-536a-436e-a823-50af03676501\" (UID: \"e0c87cde-536a-436e-a823-50af03676501\") " Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.253769 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e0c87cde-536a-436e-a823-50af03676501-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e0c87cde-536a-436e-a823-50af03676501" (UID: "e0c87cde-536a-436e-a823-50af03676501"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.276608 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0c87cde-536a-436e-a823-50af03676501-kube-api-access-zh8hb" (OuterVolumeSpecName: "kube-api-access-zh8hb") pod "e0c87cde-536a-436e-a823-50af03676501" (UID: "e0c87cde-536a-436e-a823-50af03676501"). InnerVolumeSpecName "kube-api-access-zh8hb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.359569 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zh8hb\" (UniqueName: \"kubernetes.io/projected/e0c87cde-536a-436e-a823-50af03676501-kube-api-access-zh8hb\") on node \"crc\" DevicePath \"\"" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.359593 4784 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e0c87cde-536a-436e-a823-50af03676501-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.409493 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-x99kz" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.459742 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-b8km7" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.460559 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5pjth\" (UniqueName: \"kubernetes.io/projected/b06854d3-4d35-46bb-b79f-ef5482b07eba-kube-api-access-5pjth\") pod \"b06854d3-4d35-46bb-b79f-ef5482b07eba\" (UID: \"b06854d3-4d35-46bb-b79f-ef5482b07eba\") " Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.460753 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b06854d3-4d35-46bb-b79f-ef5482b07eba-operator-scripts\") pod \"b06854d3-4d35-46bb-b79f-ef5482b07eba\" (UID: \"b06854d3-4d35-46bb-b79f-ef5482b07eba\") " Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.469736 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b06854d3-4d35-46bb-b79f-ef5482b07eba-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b06854d3-4d35-46bb-b79f-ef5482b07eba" (UID: "b06854d3-4d35-46bb-b79f-ef5482b07eba"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.472747 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b06854d3-4d35-46bb-b79f-ef5482b07eba-kube-api-access-5pjth" (OuterVolumeSpecName: "kube-api-access-5pjth") pod "b06854d3-4d35-46bb-b79f-ef5482b07eba" (UID: "b06854d3-4d35-46bb-b79f-ef5482b07eba"). InnerVolumeSpecName "kube-api-access-5pjth". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.477601 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-9959-account-create-update-5k2wn" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.487290 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-create-xwm49" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.515562 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-01e4-account-create-update-x2qm9" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.563760 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/97d32e44-8c63-443a-b1b9-cc553a42c7dd-operator-scripts\") pod \"97d32e44-8c63-443a-b1b9-cc553a42c7dd\" (UID: \"97d32e44-8c63-443a-b1b9-cc553a42c7dd\") " Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.563818 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfscb\" (UniqueName: \"kubernetes.io/projected/ff6438fd-45a7-4d2f-ae2c-a27a1a13ae83-kube-api-access-cfscb\") pod \"ff6438fd-45a7-4d2f-ae2c-a27a1a13ae83\" (UID: \"ff6438fd-45a7-4d2f-ae2c-a27a1a13ae83\") " Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.564138 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f3f23d2c-8577-46a7-90f4-37e12278b111-operator-scripts\") pod \"f3f23d2c-8577-46a7-90f4-37e12278b111\" (UID: \"f3f23d2c-8577-46a7-90f4-37e12278b111\") " Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.564220 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q92s2\" (UniqueName: \"kubernetes.io/projected/f3f23d2c-8577-46a7-90f4-37e12278b111-kube-api-access-q92s2\") pod \"f3f23d2c-8577-46a7-90f4-37e12278b111\" (UID: \"f3f23d2c-8577-46a7-90f4-37e12278b111\") " Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.564246 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a6099b41-be7d-4790-9f8c-3581d99ce48e-operator-scripts\") pod \"a6099b41-be7d-4790-9f8c-3581d99ce48e\" (UID: \"a6099b41-be7d-4790-9f8c-3581d99ce48e\") " Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.564278 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ff6438fd-45a7-4d2f-ae2c-a27a1a13ae83-operator-scripts\") pod \"ff6438fd-45a7-4d2f-ae2c-a27a1a13ae83\" (UID: \"ff6438fd-45a7-4d2f-ae2c-a27a1a13ae83\") " Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.564320 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n7fm2\" (UniqueName: \"kubernetes.io/projected/a6099b41-be7d-4790-9f8c-3581d99ce48e-kube-api-access-n7fm2\") pod \"a6099b41-be7d-4790-9f8c-3581d99ce48e\" (UID: \"a6099b41-be7d-4790-9f8c-3581d99ce48e\") " Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.564362 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xzmhr\" (UniqueName: \"kubernetes.io/projected/97d32e44-8c63-443a-b1b9-cc553a42c7dd-kube-api-access-xzmhr\") pod \"97d32e44-8c63-443a-b1b9-cc553a42c7dd\" (UID: \"97d32e44-8c63-443a-b1b9-cc553a42c7dd\") " Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.564660 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5pjth\" (UniqueName: \"kubernetes.io/projected/b06854d3-4d35-46bb-b79f-ef5482b07eba-kube-api-access-5pjth\") on node \"crc\" DevicePath \"\"" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.564673 4784 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b06854d3-4d35-46bb-b79f-ef5482b07eba-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 
12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.571042 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f3f23d2c-8577-46a7-90f4-37e12278b111-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f3f23d2c-8577-46a7-90f4-37e12278b111" (UID: "f3f23d2c-8577-46a7-90f4-37e12278b111"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.571332 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff6438fd-45a7-4d2f-ae2c-a27a1a13ae83-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ff6438fd-45a7-4d2f-ae2c-a27a1a13ae83" (UID: "ff6438fd-45a7-4d2f-ae2c-a27a1a13ae83"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.571554 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a6099b41-be7d-4790-9f8c-3581d99ce48e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a6099b41-be7d-4790-9f8c-3581d99ce48e" (UID: "a6099b41-be7d-4790-9f8c-3581d99ce48e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.571775 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/97d32e44-8c63-443a-b1b9-cc553a42c7dd-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "97d32e44-8c63-443a-b1b9-cc553a42c7dd" (UID: "97d32e44-8c63-443a-b1b9-cc553a42c7dd"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.575114 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3f23d2c-8577-46a7-90f4-37e12278b111-kube-api-access-q92s2" (OuterVolumeSpecName: "kube-api-access-q92s2") pod "f3f23d2c-8577-46a7-90f4-37e12278b111" (UID: "f3f23d2c-8577-46a7-90f4-37e12278b111"). InnerVolumeSpecName "kube-api-access-q92s2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.593723 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff6438fd-45a7-4d2f-ae2c-a27a1a13ae83-kube-api-access-cfscb" (OuterVolumeSpecName: "kube-api-access-cfscb") pod "ff6438fd-45a7-4d2f-ae2c-a27a1a13ae83" (UID: "ff6438fd-45a7-4d2f-ae2c-a27a1a13ae83"). InnerVolumeSpecName "kube-api-access-cfscb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.595989 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/97d32e44-8c63-443a-b1b9-cc553a42c7dd-kube-api-access-xzmhr" (OuterVolumeSpecName: "kube-api-access-xzmhr") pod "97d32e44-8c63-443a-b1b9-cc553a42c7dd" (UID: "97d32e44-8c63-443a-b1b9-cc553a42c7dd"). InnerVolumeSpecName "kube-api-access-xzmhr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.610410 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6099b41-be7d-4790-9f8c-3581d99ce48e-kube-api-access-n7fm2" (OuterVolumeSpecName: "kube-api-access-n7fm2") pod "a6099b41-be7d-4790-9f8c-3581d99ce48e" (UID: "a6099b41-be7d-4790-9f8c-3581d99ce48e"). 
InnerVolumeSpecName "kube-api-access-n7fm2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.647953 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ljc2l-config-w5q27"] Dec 05 12:44:40 crc kubenswrapper[4784]: E1205 12:44:40.648373 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97d32e44-8c63-443a-b1b9-cc553a42c7dd" containerName="mariadb-account-create-update" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.648387 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="97d32e44-8c63-443a-b1b9-cc553a42c7dd" containerName="mariadb-account-create-update" Dec 05 12:44:40 crc kubenswrapper[4784]: E1205 12:44:40.648401 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3f23d2c-8577-46a7-90f4-37e12278b111" containerName="mariadb-database-create" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.648408 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3f23d2c-8577-46a7-90f4-37e12278b111" containerName="mariadb-database-create" Dec 05 12:44:40 crc kubenswrapper[4784]: E1205 12:44:40.648416 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b06854d3-4d35-46bb-b79f-ef5482b07eba" containerName="mariadb-database-create" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.648424 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="b06854d3-4d35-46bb-b79f-ef5482b07eba" containerName="mariadb-database-create" Dec 05 12:44:40 crc kubenswrapper[4784]: E1205 12:44:40.648445 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c6c365b-59eb-4668-9f15-42dcc18f87bd" containerName="init" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.648454 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c6c365b-59eb-4668-9f15-42dcc18f87bd" containerName="init" Dec 05 12:44:40 crc kubenswrapper[4784]: E1205 12:44:40.648471 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff6438fd-45a7-4d2f-ae2c-a27a1a13ae83" containerName="mariadb-database-create" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.648478 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff6438fd-45a7-4d2f-ae2c-a27a1a13ae83" containerName="mariadb-database-create" Dec 05 12:44:40 crc kubenswrapper[4784]: E1205 12:44:40.648496 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c6c365b-59eb-4668-9f15-42dcc18f87bd" containerName="dnsmasq-dns" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.648502 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c6c365b-59eb-4668-9f15-42dcc18f87bd" containerName="dnsmasq-dns" Dec 05 12:44:40 crc kubenswrapper[4784]: E1205 12:44:40.648514 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0c87cde-536a-436e-a823-50af03676501" containerName="mariadb-account-create-update" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.648520 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0c87cde-536a-436e-a823-50af03676501" containerName="mariadb-account-create-update" Dec 05 12:44:40 crc kubenswrapper[4784]: E1205 12:44:40.648533 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6099b41-be7d-4790-9f8c-3581d99ce48e" containerName="mariadb-account-create-update" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.648539 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6099b41-be7d-4790-9f8c-3581d99ce48e" containerName="mariadb-account-create-update" Dec 05 
12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.648693 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6099b41-be7d-4790-9f8c-3581d99ce48e" containerName="mariadb-account-create-update" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.648709 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c6c365b-59eb-4668-9f15-42dcc18f87bd" containerName="dnsmasq-dns" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.648715 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff6438fd-45a7-4d2f-ae2c-a27a1a13ae83" containerName="mariadb-database-create" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.648729 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="97d32e44-8c63-443a-b1b9-cc553a42c7dd" containerName="mariadb-account-create-update" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.648736 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0c87cde-536a-436e-a823-50af03676501" containerName="mariadb-account-create-update" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.648746 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3f23d2c-8577-46a7-90f4-37e12278b111" containerName="mariadb-database-create" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.648754 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="b06854d3-4d35-46bb-b79f-ef5482b07eba" containerName="mariadb-database-create" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.649351 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ljc2l-config-w5q27" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.654569 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.665568 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qws7j\" (UniqueName: \"kubernetes.io/projected/a0357caa-4ba7-49d8-ba30-ea80ca11133c-kube-api-access-qws7j\") pod \"ovn-controller-ljc2l-config-w5q27\" (UID: \"a0357caa-4ba7-49d8-ba30-ea80ca11133c\") " pod="openstack/ovn-controller-ljc2l-config-w5q27" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.673147 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a0357caa-4ba7-49d8-ba30-ea80ca11133c-var-log-ovn\") pod \"ovn-controller-ljc2l-config-w5q27\" (UID: \"a0357caa-4ba7-49d8-ba30-ea80ca11133c\") " pod="openstack/ovn-controller-ljc2l-config-w5q27" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.673394 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/a0357caa-4ba7-49d8-ba30-ea80ca11133c-additional-scripts\") pod \"ovn-controller-ljc2l-config-w5q27\" (UID: \"a0357caa-4ba7-49d8-ba30-ea80ca11133c\") " pod="openstack/ovn-controller-ljc2l-config-w5q27" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.673620 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a0357caa-4ba7-49d8-ba30-ea80ca11133c-var-run\") pod \"ovn-controller-ljc2l-config-w5q27\" (UID: \"a0357caa-4ba7-49d8-ba30-ea80ca11133c\") " pod="openstack/ovn-controller-ljc2l-config-w5q27" Dec 05 12:44:40 crc kubenswrapper[4784]: 
I1205 12:44:40.673758 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a0357caa-4ba7-49d8-ba30-ea80ca11133c-var-run-ovn\") pod \"ovn-controller-ljc2l-config-w5q27\" (UID: \"a0357caa-4ba7-49d8-ba30-ea80ca11133c\") " pod="openstack/ovn-controller-ljc2l-config-w5q27" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.673913 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a0357caa-4ba7-49d8-ba30-ea80ca11133c-scripts\") pod \"ovn-controller-ljc2l-config-w5q27\" (UID: \"a0357caa-4ba7-49d8-ba30-ea80ca11133c\") " pod="openstack/ovn-controller-ljc2l-config-w5q27" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.674059 4784 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ff6438fd-45a7-4d2f-ae2c-a27a1a13ae83-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.674128 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n7fm2\" (UniqueName: \"kubernetes.io/projected/a6099b41-be7d-4790-9f8c-3581d99ce48e-kube-api-access-n7fm2\") on node \"crc\" DevicePath \"\"" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.674204 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xzmhr\" (UniqueName: \"kubernetes.io/projected/97d32e44-8c63-443a-b1b9-cc553a42c7dd-kube-api-access-xzmhr\") on node \"crc\" DevicePath \"\"" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.674274 4784 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/97d32e44-8c63-443a-b1b9-cc553a42c7dd-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.674335 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfscb\" (UniqueName: \"kubernetes.io/projected/ff6438fd-45a7-4d2f-ae2c-a27a1a13ae83-kube-api-access-cfscb\") on node \"crc\" DevicePath \"\"" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.674390 4784 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f3f23d2c-8577-46a7-90f4-37e12278b111-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.674443 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q92s2\" (UniqueName: \"kubernetes.io/projected/f3f23d2c-8577-46a7-90f4-37e12278b111-kube-api-access-q92s2\") on node \"crc\" DevicePath \"\"" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.674503 4784 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a6099b41-be7d-4790-9f8c-3581d99ce48e-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.670910 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ljc2l-config-w5q27"] Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.708821 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-x99kz" event={"ID":"b06854d3-4d35-46bb-b79f-ef5482b07eba","Type":"ContainerDied","Data":"6926e3eaa98f304d99fd04d1d8b7a4b794827593741e7a086f52646edfae51d7"} Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.708879 4784 
pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6926e3eaa98f304d99fd04d1d8b7a4b794827593741e7a086f52646edfae51d7" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.708977 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-x99kz" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.710930 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-01e4-account-create-update-x2qm9" event={"ID":"97d32e44-8c63-443a-b1b9-cc553a42c7dd","Type":"ContainerDied","Data":"6bdf45cdc4e9054487ddb4134eefbf3c2c32706373a7551960c5f1edd24eb665"} Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.710950 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6bdf45cdc4e9054487ddb4134eefbf3c2c32706373a7551960c5f1edd24eb665" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.711008 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-01e4-account-create-update-x2qm9" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.716091 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-create-xwm49" event={"ID":"f3f23d2c-8577-46a7-90f4-37e12278b111","Type":"ContainerDied","Data":"29a38753181fe3112f6f2f450626a0111a805dbe6a04982e92338b9ac8130eeb"} Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.716128 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="29a38753181fe3112f6f2f450626a0111a805dbe6a04982e92338b9ac8130eeb" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.716182 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-create-xwm49" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.731826 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-88ae-account-create-update-ht5wv" event={"ID":"e0c87cde-536a-436e-a823-50af03676501","Type":"ContainerDied","Data":"e5418a9c9926bb0929d109485c7a9179f8433afc966cf5ca67eef515159fcd1a"} Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.731863 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e5418a9c9926bb0929d109485c7a9179f8433afc966cf5ca67eef515159fcd1a" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.731920 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-88ae-account-create-update-ht5wv" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.740033 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-9959-account-create-update-5k2wn" event={"ID":"a6099b41-be7d-4790-9f8c-3581d99ce48e","Type":"ContainerDied","Data":"7d5aba3afd075a3cd64eb47d6a330f95b2accf1302c92a0cd181aba5e0a0da65"} Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.740133 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7d5aba3afd075a3cd64eb47d6a330f95b2accf1302c92a0cd181aba5e0a0da65" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.740200 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-9959-account-create-update-5k2wn" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.745383 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-b8km7" event={"ID":"ff6438fd-45a7-4d2f-ae2c-a27a1a13ae83","Type":"ContainerDied","Data":"63713bd9c251dbe7b758a2efcf220c06e56efdcf52bd313ee5458fe997a33a91"} Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.745419 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="63713bd9c251dbe7b758a2efcf220c06e56efdcf52bd313ee5458fe997a33a91" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.745468 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-b8km7" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.775729 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a0357caa-4ba7-49d8-ba30-ea80ca11133c-var-run\") pod \"ovn-controller-ljc2l-config-w5q27\" (UID: \"a0357caa-4ba7-49d8-ba30-ea80ca11133c\") " pod="openstack/ovn-controller-ljc2l-config-w5q27" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.775789 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a0357caa-4ba7-49d8-ba30-ea80ca11133c-var-run-ovn\") pod \"ovn-controller-ljc2l-config-w5q27\" (UID: \"a0357caa-4ba7-49d8-ba30-ea80ca11133c\") " pod="openstack/ovn-controller-ljc2l-config-w5q27" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.776095 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a0357caa-4ba7-49d8-ba30-ea80ca11133c-scripts\") pod \"ovn-controller-ljc2l-config-w5q27\" (UID: \"a0357caa-4ba7-49d8-ba30-ea80ca11133c\") " pod="openstack/ovn-controller-ljc2l-config-w5q27" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.776160 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qws7j\" (UniqueName: \"kubernetes.io/projected/a0357caa-4ba7-49d8-ba30-ea80ca11133c-kube-api-access-qws7j\") pod \"ovn-controller-ljc2l-config-w5q27\" (UID: \"a0357caa-4ba7-49d8-ba30-ea80ca11133c\") " pod="openstack/ovn-controller-ljc2l-config-w5q27" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.776178 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a0357caa-4ba7-49d8-ba30-ea80ca11133c-var-log-ovn\") pod \"ovn-controller-ljc2l-config-w5q27\" (UID: \"a0357caa-4ba7-49d8-ba30-ea80ca11133c\") " pod="openstack/ovn-controller-ljc2l-config-w5q27" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.776219 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/a0357caa-4ba7-49d8-ba30-ea80ca11133c-additional-scripts\") pod \"ovn-controller-ljc2l-config-w5q27\" (UID: \"a0357caa-4ba7-49d8-ba30-ea80ca11133c\") " pod="openstack/ovn-controller-ljc2l-config-w5q27" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.776798 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a0357caa-4ba7-49d8-ba30-ea80ca11133c-var-run-ovn\") pod \"ovn-controller-ljc2l-config-w5q27\" (UID: \"a0357caa-4ba7-49d8-ba30-ea80ca11133c\") " 
pod="openstack/ovn-controller-ljc2l-config-w5q27" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.776909 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a0357caa-4ba7-49d8-ba30-ea80ca11133c-var-run\") pod \"ovn-controller-ljc2l-config-w5q27\" (UID: \"a0357caa-4ba7-49d8-ba30-ea80ca11133c\") " pod="openstack/ovn-controller-ljc2l-config-w5q27" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.776956 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a0357caa-4ba7-49d8-ba30-ea80ca11133c-var-log-ovn\") pod \"ovn-controller-ljc2l-config-w5q27\" (UID: \"a0357caa-4ba7-49d8-ba30-ea80ca11133c\") " pod="openstack/ovn-controller-ljc2l-config-w5q27" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.777574 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/a0357caa-4ba7-49d8-ba30-ea80ca11133c-additional-scripts\") pod \"ovn-controller-ljc2l-config-w5q27\" (UID: \"a0357caa-4ba7-49d8-ba30-ea80ca11133c\") " pod="openstack/ovn-controller-ljc2l-config-w5q27" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.779113 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a0357caa-4ba7-49d8-ba30-ea80ca11133c-scripts\") pod \"ovn-controller-ljc2l-config-w5q27\" (UID: \"a0357caa-4ba7-49d8-ba30-ea80ca11133c\") " pod="openstack/ovn-controller-ljc2l-config-w5q27" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.795883 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qws7j\" (UniqueName: \"kubernetes.io/projected/a0357caa-4ba7-49d8-ba30-ea80ca11133c-kube-api-access-qws7j\") pod \"ovn-controller-ljc2l-config-w5q27\" (UID: \"a0357caa-4ba7-49d8-ba30-ea80ca11133c\") " pod="openstack/ovn-controller-ljc2l-config-w5q27" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.856472 4784 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-ccfb859df-n5vsw" podUID="8c6c365b-59eb-4668-9f15-42dcc18f87bd" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.116:5353: i/o timeout" Dec 05 12:44:40 crc kubenswrapper[4784]: I1205 12:44:40.992687 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ljc2l-config-w5q27" Dec 05 12:44:41 crc kubenswrapper[4784]: I1205 12:44:41.454349 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ljc2l-config-w5q27"] Dec 05 12:44:41 crc kubenswrapper[4784]: I1205 12:44:41.767225 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ljc2l-config-w5q27" event={"ID":"a0357caa-4ba7-49d8-ba30-ea80ca11133c","Type":"ContainerStarted","Data":"b23172e4c3a28ab06d2ff002d6e45ce76a88d0a79da7b4ffabe93610c768b2ae"} Dec 05 12:44:41 crc kubenswrapper[4784]: I1205 12:44:41.769485 4784 generic.go:334] "Generic (PLEG): container finished" podID="0a051f14-c8d2-4d57-95a9-9be7c46f9031" containerID="e949137b0e1c62a396337d8c7c4ca9ffa0bdff069c634228e646ac06f66e0447" exitCode=0 Dec 05 12:44:41 crc kubenswrapper[4784]: I1205 12:44:41.769557 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-notifications-server-0" event={"ID":"0a051f14-c8d2-4d57-95a9-9be7c46f9031","Type":"ContainerDied","Data":"e949137b0e1c62a396337d8c7c4ca9ffa0bdff069c634228e646ac06f66e0447"} Dec 05 12:44:41 crc kubenswrapper[4784]: I1205 12:44:41.771954 4784 generic.go:334] "Generic (PLEG): container finished" podID="b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b" containerID="22363b649441c80e6ba0100af0b361bd0e296e9bee6f6dfe1d164d8d100e3153" exitCode=0 Dec 05 12:44:41 crc kubenswrapper[4784]: I1205 12:44:41.772021 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b","Type":"ContainerDied","Data":"22363b649441c80e6ba0100af0b361bd0e296e9bee6f6dfe1d164d8d100e3153"} Dec 05 12:44:42 crc kubenswrapper[4784]: I1205 12:44:42.364517 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Dec 05 12:44:42 crc kubenswrapper[4784]: I1205 12:44:42.365002 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Dec 05 12:44:42 crc kubenswrapper[4784]: I1205 12:44:42.367173 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Dec 05 12:44:42 crc kubenswrapper[4784]: E1205 12:44:42.367681 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"thanos-sidecar\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/thanos-rhel9@sha256:d972f4faa5e9c121402d23ed85002f26af48ec36b1b71a7489d677b3913d08b4\\\"\"" pod="openstack/prometheus-metric-storage-0" podUID="80b02221-f73a-4535-b2d9-c203e5de2061" Dec 05 12:44:42 crc kubenswrapper[4784]: I1205 12:44:42.781854 4784 generic.go:334] "Generic (PLEG): container finished" podID="e9889e9e-8ec4-44aa-a829-327920ab827f" containerID="ae813c619b4059c1fac1dd894044aaadfb3d9881e07c852a6891523597c9bbce" exitCode=0 Dec 05 12:44:42 crc kubenswrapper[4784]: I1205 12:44:42.781920 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e9889e9e-8ec4-44aa-a829-327920ab827f","Type":"ContainerDied","Data":"ae813c619b4059c1fac1dd894044aaadfb3d9881e07c852a6891523597c9bbce"} Dec 05 12:44:42 crc kubenswrapper[4784]: I1205 12:44:42.785392 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-notifications-server-0" 
event={"ID":"0a051f14-c8d2-4d57-95a9-9be7c46f9031","Type":"ContainerStarted","Data":"791f1e3b43b372c25468af11236f00f3c0f3179207f684fd1b054d204962acb0"} Dec 05 12:44:42 crc kubenswrapper[4784]: I1205 12:44:42.785928 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:44:42 crc kubenswrapper[4784]: I1205 12:44:42.787590 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b","Type":"ContainerStarted","Data":"37011c59b3fd9ddce0a34429d3b2358d1770fa3a06b3677f682fba41637f0a5b"} Dec 05 12:44:42 crc kubenswrapper[4784]: I1205 12:44:42.787748 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 05 12:44:42 crc kubenswrapper[4784]: I1205 12:44:42.789302 4784 generic.go:334] "Generic (PLEG): container finished" podID="a0357caa-4ba7-49d8-ba30-ea80ca11133c" containerID="a1d669560b9ce310a44a6087662efd06d842726a425f5a3285b0aedb4cd31110" exitCode=0 Dec 05 12:44:42 crc kubenswrapper[4784]: I1205 12:44:42.790126 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ljc2l-config-w5q27" event={"ID":"a0357caa-4ba7-49d8-ba30-ea80ca11133c","Type":"ContainerDied","Data":"a1d669560b9ce310a44a6087662efd06d842726a425f5a3285b0aedb4cd31110"} Dec 05 12:44:42 crc kubenswrapper[4784]: E1205 12:44:42.791254 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"thanos-sidecar\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/thanos-rhel9@sha256:d972f4faa5e9c121402d23ed85002f26af48ec36b1b71a7489d677b3913d08b4\\\"\"" pod="openstack/prometheus-metric-storage-0" podUID="80b02221-f73a-4535-b2d9-c203e5de2061" Dec 05 12:44:42 crc kubenswrapper[4784]: I1205 12:44:42.792872 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Dec 05 12:44:42 crc kubenswrapper[4784]: I1205 12:44:42.850721 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-notifications-server-0" podStartSLOduration=38.678396411 podStartE2EDuration="1m23.850703781s" podCreationTimestamp="2025-12-05 12:43:19 +0000 UTC" firstStartedPulling="2025-12-05 12:43:21.205027444 +0000 UTC m=+1080.625094259" lastFinishedPulling="2025-12-05 12:44:06.377334814 +0000 UTC m=+1125.797401629" observedRunningTime="2025-12-05 12:44:42.840658777 +0000 UTC m=+1162.260725592" watchObservedRunningTime="2025-12-05 12:44:42.850703781 +0000 UTC m=+1162.270770586" Dec 05 12:44:42 crc kubenswrapper[4784]: I1205 12:44:42.882439 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=39.274952706 podStartE2EDuration="1m24.882422923s" podCreationTimestamp="2025-12-05 12:43:18 +0000 UTC" firstStartedPulling="2025-12-05 12:43:20.763534519 +0000 UTC m=+1080.183601334" lastFinishedPulling="2025-12-05 12:44:06.371004736 +0000 UTC m=+1125.791071551" observedRunningTime="2025-12-05 12:44:42.880230704 +0000 UTC m=+1162.300297529" watchObservedRunningTime="2025-12-05 12:44:42.882422923 +0000 UTC m=+1162.302489738" Dec 05 12:44:43 crc kubenswrapper[4784]: I1205 12:44:43.801365 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" 
event={"ID":"e9889e9e-8ec4-44aa-a829-327920ab827f","Type":"ContainerStarted","Data":"d02604ada5f4a9b3e1740a49a9fb699d416f5d768de2e3c0a242995b1dcc93dc"} Dec 05 12:44:43 crc kubenswrapper[4784]: E1205 12:44:43.804591 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"thanos-sidecar\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/thanos-rhel9@sha256:d972f4faa5e9c121402d23ed85002f26af48ec36b1b71a7489d677b3913d08b4\\\"\"" pod="openstack/prometheus-metric-storage-0" podUID="80b02221-f73a-4535-b2d9-c203e5de2061" Dec 05 12:44:43 crc kubenswrapper[4784]: I1205 12:44:43.832887 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=-9223371951.021921 podStartE2EDuration="1m25.832855207s" podCreationTimestamp="2025-12-05 12:43:18 +0000 UTC" firstStartedPulling="2025-12-05 12:43:20.941531178 +0000 UTC m=+1080.361597993" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:44:43.828992927 +0000 UTC m=+1163.249059752" watchObservedRunningTime="2025-12-05 12:44:43.832855207 +0000 UTC m=+1163.252922062" Dec 05 12:44:44 crc kubenswrapper[4784]: I1205 12:44:44.242860 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ljc2l-config-w5q27" Dec 05 12:44:44 crc kubenswrapper[4784]: I1205 12:44:44.349327 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qws7j\" (UniqueName: \"kubernetes.io/projected/a0357caa-4ba7-49d8-ba30-ea80ca11133c-kube-api-access-qws7j\") pod \"a0357caa-4ba7-49d8-ba30-ea80ca11133c\" (UID: \"a0357caa-4ba7-49d8-ba30-ea80ca11133c\") " Dec 05 12:44:44 crc kubenswrapper[4784]: I1205 12:44:44.349368 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a0357caa-4ba7-49d8-ba30-ea80ca11133c-var-log-ovn\") pod \"a0357caa-4ba7-49d8-ba30-ea80ca11133c\" (UID: \"a0357caa-4ba7-49d8-ba30-ea80ca11133c\") " Dec 05 12:44:44 crc kubenswrapper[4784]: I1205 12:44:44.349403 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a0357caa-4ba7-49d8-ba30-ea80ca11133c-var-run-ovn\") pod \"a0357caa-4ba7-49d8-ba30-ea80ca11133c\" (UID: \"a0357caa-4ba7-49d8-ba30-ea80ca11133c\") " Dec 05 12:44:44 crc kubenswrapper[4784]: I1205 12:44:44.349497 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a0357caa-4ba7-49d8-ba30-ea80ca11133c-var-run\") pod \"a0357caa-4ba7-49d8-ba30-ea80ca11133c\" (UID: \"a0357caa-4ba7-49d8-ba30-ea80ca11133c\") " Dec 05 12:44:44 crc kubenswrapper[4784]: I1205 12:44:44.349544 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a0357caa-4ba7-49d8-ba30-ea80ca11133c-scripts\") pod \"a0357caa-4ba7-49d8-ba30-ea80ca11133c\" (UID: \"a0357caa-4ba7-49d8-ba30-ea80ca11133c\") " Dec 05 12:44:44 crc kubenswrapper[4784]: I1205 12:44:44.349540 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a0357caa-4ba7-49d8-ba30-ea80ca11133c-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "a0357caa-4ba7-49d8-ba30-ea80ca11133c" (UID: "a0357caa-4ba7-49d8-ba30-ea80ca11133c"). InnerVolumeSpecName "var-log-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:44:44 crc kubenswrapper[4784]: I1205 12:44:44.349571 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a0357caa-4ba7-49d8-ba30-ea80ca11133c-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "a0357caa-4ba7-49d8-ba30-ea80ca11133c" (UID: "a0357caa-4ba7-49d8-ba30-ea80ca11133c"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:44:44 crc kubenswrapper[4784]: I1205 12:44:44.349630 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a0357caa-4ba7-49d8-ba30-ea80ca11133c-var-run" (OuterVolumeSpecName: "var-run") pod "a0357caa-4ba7-49d8-ba30-ea80ca11133c" (UID: "a0357caa-4ba7-49d8-ba30-ea80ca11133c"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:44:44 crc kubenswrapper[4784]: I1205 12:44:44.349690 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/a0357caa-4ba7-49d8-ba30-ea80ca11133c-additional-scripts\") pod \"a0357caa-4ba7-49d8-ba30-ea80ca11133c\" (UID: \"a0357caa-4ba7-49d8-ba30-ea80ca11133c\") " Dec 05 12:44:44 crc kubenswrapper[4784]: I1205 12:44:44.350218 4784 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a0357caa-4ba7-49d8-ba30-ea80ca11133c-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 12:44:44 crc kubenswrapper[4784]: I1205 12:44:44.350232 4784 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a0357caa-4ba7-49d8-ba30-ea80ca11133c-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 12:44:44 crc kubenswrapper[4784]: I1205 12:44:44.350241 4784 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a0357caa-4ba7-49d8-ba30-ea80ca11133c-var-run\") on node \"crc\" DevicePath \"\"" Dec 05 12:44:44 crc kubenswrapper[4784]: I1205 12:44:44.350782 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a0357caa-4ba7-49d8-ba30-ea80ca11133c-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "a0357caa-4ba7-49d8-ba30-ea80ca11133c" (UID: "a0357caa-4ba7-49d8-ba30-ea80ca11133c"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:44:44 crc kubenswrapper[4784]: I1205 12:44:44.350946 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a0357caa-4ba7-49d8-ba30-ea80ca11133c-scripts" (OuterVolumeSpecName: "scripts") pod "a0357caa-4ba7-49d8-ba30-ea80ca11133c" (UID: "a0357caa-4ba7-49d8-ba30-ea80ca11133c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:44:44 crc kubenswrapper[4784]: I1205 12:44:44.355568 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0357caa-4ba7-49d8-ba30-ea80ca11133c-kube-api-access-qws7j" (OuterVolumeSpecName: "kube-api-access-qws7j") pod "a0357caa-4ba7-49d8-ba30-ea80ca11133c" (UID: "a0357caa-4ba7-49d8-ba30-ea80ca11133c"). InnerVolumeSpecName "kube-api-access-qws7j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:44:44 crc kubenswrapper[4784]: I1205 12:44:44.451419 4784 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a0357caa-4ba7-49d8-ba30-ea80ca11133c-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:44:44 crc kubenswrapper[4784]: I1205 12:44:44.451450 4784 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/a0357caa-4ba7-49d8-ba30-ea80ca11133c-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:44:44 crc kubenswrapper[4784]: I1205 12:44:44.451460 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qws7j\" (UniqueName: \"kubernetes.io/projected/a0357caa-4ba7-49d8-ba30-ea80ca11133c-kube-api-access-qws7j\") on node \"crc\" DevicePath \"\"" Dec 05 12:44:44 crc kubenswrapper[4784]: I1205 12:44:44.814068 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ljc2l-config-w5q27" event={"ID":"a0357caa-4ba7-49d8-ba30-ea80ca11133c","Type":"ContainerDied","Data":"b23172e4c3a28ab06d2ff002d6e45ce76a88d0a79da7b4ffabe93610c768b2ae"} Dec 05 12:44:44 crc kubenswrapper[4784]: I1205 12:44:44.814108 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b23172e4c3a28ab06d2ff002d6e45ce76a88d0a79da7b4ffabe93610c768b2ae" Dec 05 12:44:44 crc kubenswrapper[4784]: I1205 12:44:44.814170 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ljc2l-config-w5q27" Dec 05 12:44:45 crc kubenswrapper[4784]: I1205 12:44:45.157817 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ljc2l" Dec 05 12:44:45 crc kubenswrapper[4784]: I1205 12:44:45.344590 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ljc2l-config-w5q27"] Dec 05 12:44:45 crc kubenswrapper[4784]: I1205 12:44:45.351239 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-ljc2l-config-w5q27"] Dec 05 12:44:45 crc kubenswrapper[4784]: I1205 12:44:45.378705 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ljc2l-config-hrbzh"] Dec 05 12:44:45 crc kubenswrapper[4784]: E1205 12:44:45.379049 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0357caa-4ba7-49d8-ba30-ea80ca11133c" containerName="ovn-config" Dec 05 12:44:45 crc kubenswrapper[4784]: I1205 12:44:45.379064 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0357caa-4ba7-49d8-ba30-ea80ca11133c" containerName="ovn-config" Dec 05 12:44:45 crc kubenswrapper[4784]: I1205 12:44:45.379241 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0357caa-4ba7-49d8-ba30-ea80ca11133c" containerName="ovn-config" Dec 05 12:44:45 crc kubenswrapper[4784]: I1205 12:44:45.379771 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ljc2l-config-hrbzh" Dec 05 12:44:45 crc kubenswrapper[4784]: I1205 12:44:45.381929 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 05 12:44:45 crc kubenswrapper[4784]: I1205 12:44:45.399816 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ljc2l-config-hrbzh"] Dec 05 12:44:45 crc kubenswrapper[4784]: I1205 12:44:45.468574 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d9131e56-7c3e-4430-a114-44d88a6db064-var-run-ovn\") pod \"ovn-controller-ljc2l-config-hrbzh\" (UID: \"d9131e56-7c3e-4430-a114-44d88a6db064\") " pod="openstack/ovn-controller-ljc2l-config-hrbzh" Dec 05 12:44:45 crc kubenswrapper[4784]: I1205 12:44:45.468643 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d9131e56-7c3e-4430-a114-44d88a6db064-var-run\") pod \"ovn-controller-ljc2l-config-hrbzh\" (UID: \"d9131e56-7c3e-4430-a114-44d88a6db064\") " pod="openstack/ovn-controller-ljc2l-config-hrbzh" Dec 05 12:44:45 crc kubenswrapper[4784]: I1205 12:44:45.468717 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d9131e56-7c3e-4430-a114-44d88a6db064-var-log-ovn\") pod \"ovn-controller-ljc2l-config-hrbzh\" (UID: \"d9131e56-7c3e-4430-a114-44d88a6db064\") " pod="openstack/ovn-controller-ljc2l-config-hrbzh" Dec 05 12:44:45 crc kubenswrapper[4784]: I1205 12:44:45.468802 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/d9131e56-7c3e-4430-a114-44d88a6db064-additional-scripts\") pod \"ovn-controller-ljc2l-config-hrbzh\" (UID: \"d9131e56-7c3e-4430-a114-44d88a6db064\") " pod="openstack/ovn-controller-ljc2l-config-hrbzh" Dec 05 12:44:45 crc kubenswrapper[4784]: I1205 12:44:45.468832 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d9131e56-7c3e-4430-a114-44d88a6db064-scripts\") pod \"ovn-controller-ljc2l-config-hrbzh\" (UID: \"d9131e56-7c3e-4430-a114-44d88a6db064\") " pod="openstack/ovn-controller-ljc2l-config-hrbzh" Dec 05 12:44:45 crc kubenswrapper[4784]: I1205 12:44:45.468865 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-58kjx\" (UniqueName: \"kubernetes.io/projected/d9131e56-7c3e-4430-a114-44d88a6db064-kube-api-access-58kjx\") pod \"ovn-controller-ljc2l-config-hrbzh\" (UID: \"d9131e56-7c3e-4430-a114-44d88a6db064\") " pod="openstack/ovn-controller-ljc2l-config-hrbzh" Dec 05 12:44:45 crc kubenswrapper[4784]: I1205 12:44:45.571649 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d9131e56-7c3e-4430-a114-44d88a6db064-var-run\") pod \"ovn-controller-ljc2l-config-hrbzh\" (UID: \"d9131e56-7c3e-4430-a114-44d88a6db064\") " pod="openstack/ovn-controller-ljc2l-config-hrbzh" Dec 05 12:44:45 crc kubenswrapper[4784]: I1205 12:44:45.571752 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d9131e56-7c3e-4430-a114-44d88a6db064-var-log-ovn\") pod 
\"ovn-controller-ljc2l-config-hrbzh\" (UID: \"d9131e56-7c3e-4430-a114-44d88a6db064\") " pod="openstack/ovn-controller-ljc2l-config-hrbzh" Dec 05 12:44:45 crc kubenswrapper[4784]: I1205 12:44:45.571824 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/d9131e56-7c3e-4430-a114-44d88a6db064-additional-scripts\") pod \"ovn-controller-ljc2l-config-hrbzh\" (UID: \"d9131e56-7c3e-4430-a114-44d88a6db064\") " pod="openstack/ovn-controller-ljc2l-config-hrbzh" Dec 05 12:44:45 crc kubenswrapper[4784]: I1205 12:44:45.571857 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d9131e56-7c3e-4430-a114-44d88a6db064-scripts\") pod \"ovn-controller-ljc2l-config-hrbzh\" (UID: \"d9131e56-7c3e-4430-a114-44d88a6db064\") " pod="openstack/ovn-controller-ljc2l-config-hrbzh" Dec 05 12:44:45 crc kubenswrapper[4784]: I1205 12:44:45.571884 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-58kjx\" (UniqueName: \"kubernetes.io/projected/d9131e56-7c3e-4430-a114-44d88a6db064-kube-api-access-58kjx\") pod \"ovn-controller-ljc2l-config-hrbzh\" (UID: \"d9131e56-7c3e-4430-a114-44d88a6db064\") " pod="openstack/ovn-controller-ljc2l-config-hrbzh" Dec 05 12:44:45 crc kubenswrapper[4784]: I1205 12:44:45.571911 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d9131e56-7c3e-4430-a114-44d88a6db064-var-run-ovn\") pod \"ovn-controller-ljc2l-config-hrbzh\" (UID: \"d9131e56-7c3e-4430-a114-44d88a6db064\") " pod="openstack/ovn-controller-ljc2l-config-hrbzh" Dec 05 12:44:45 crc kubenswrapper[4784]: I1205 12:44:45.572049 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d9131e56-7c3e-4430-a114-44d88a6db064-var-run\") pod \"ovn-controller-ljc2l-config-hrbzh\" (UID: \"d9131e56-7c3e-4430-a114-44d88a6db064\") " pod="openstack/ovn-controller-ljc2l-config-hrbzh" Dec 05 12:44:45 crc kubenswrapper[4784]: I1205 12:44:45.572118 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d9131e56-7c3e-4430-a114-44d88a6db064-var-run-ovn\") pod \"ovn-controller-ljc2l-config-hrbzh\" (UID: \"d9131e56-7c3e-4430-a114-44d88a6db064\") " pod="openstack/ovn-controller-ljc2l-config-hrbzh" Dec 05 12:44:45 crc kubenswrapper[4784]: I1205 12:44:45.573838 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/d9131e56-7c3e-4430-a114-44d88a6db064-additional-scripts\") pod \"ovn-controller-ljc2l-config-hrbzh\" (UID: \"d9131e56-7c3e-4430-a114-44d88a6db064\") " pod="openstack/ovn-controller-ljc2l-config-hrbzh" Dec 05 12:44:45 crc kubenswrapper[4784]: I1205 12:44:45.574021 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d9131e56-7c3e-4430-a114-44d88a6db064-scripts\") pod \"ovn-controller-ljc2l-config-hrbzh\" (UID: \"d9131e56-7c3e-4430-a114-44d88a6db064\") " pod="openstack/ovn-controller-ljc2l-config-hrbzh" Dec 05 12:44:45 crc kubenswrapper[4784]: I1205 12:44:45.574125 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d9131e56-7c3e-4430-a114-44d88a6db064-var-log-ovn\") pod 
\"ovn-controller-ljc2l-config-hrbzh\" (UID: \"d9131e56-7c3e-4430-a114-44d88a6db064\") " pod="openstack/ovn-controller-ljc2l-config-hrbzh" Dec 05 12:44:45 crc kubenswrapper[4784]: I1205 12:44:45.591601 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-58kjx\" (UniqueName: \"kubernetes.io/projected/d9131e56-7c3e-4430-a114-44d88a6db064-kube-api-access-58kjx\") pod \"ovn-controller-ljc2l-config-hrbzh\" (UID: \"d9131e56-7c3e-4430-a114-44d88a6db064\") " pod="openstack/ovn-controller-ljc2l-config-hrbzh" Dec 05 12:44:45 crc kubenswrapper[4784]: I1205 12:44:45.695398 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ljc2l-config-hrbzh" Dec 05 12:44:46 crc kubenswrapper[4784]: I1205 12:44:46.194960 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ljc2l-config-hrbzh"] Dec 05 12:44:46 crc kubenswrapper[4784]: I1205 12:44:46.835325 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ljc2l-config-hrbzh" event={"ID":"d9131e56-7c3e-4430-a114-44d88a6db064","Type":"ContainerStarted","Data":"587ae8b4ecbbf8c3a53dc103d722bdef7bb03047d9e1a3c4441922544763350f"} Dec 05 12:44:46 crc kubenswrapper[4784]: I1205 12:44:46.835684 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ljc2l-config-hrbzh" event={"ID":"d9131e56-7c3e-4430-a114-44d88a6db064","Type":"ContainerStarted","Data":"16d3de7e854bd99cdfcb6628addf653bd96a94bfbb70088c367abfbcb0112c15"} Dec 05 12:44:46 crc kubenswrapper[4784]: I1205 12:44:46.860588 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ljc2l-config-hrbzh" podStartSLOduration=1.860571347 podStartE2EDuration="1.860571347s" podCreationTimestamp="2025-12-05 12:44:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:44:46.856560221 +0000 UTC m=+1166.276627036" watchObservedRunningTime="2025-12-05 12:44:46.860571347 +0000 UTC m=+1166.280638152" Dec 05 12:44:47 crc kubenswrapper[4784]: I1205 12:44:47.013655 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0357caa-4ba7-49d8-ba30-ea80ca11133c" path="/var/lib/kubelet/pods/a0357caa-4ba7-49d8-ba30-ea80ca11133c/volumes" Dec 05 12:44:47 crc kubenswrapper[4784]: I1205 12:44:47.843397 4784 generic.go:334] "Generic (PLEG): container finished" podID="d9131e56-7c3e-4430-a114-44d88a6db064" containerID="587ae8b4ecbbf8c3a53dc103d722bdef7bb03047d9e1a3c4441922544763350f" exitCode=0 Dec 05 12:44:47 crc kubenswrapper[4784]: I1205 12:44:47.843443 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ljc2l-config-hrbzh" event={"ID":"d9131e56-7c3e-4430-a114-44d88a6db064","Type":"ContainerDied","Data":"587ae8b4ecbbf8c3a53dc103d722bdef7bb03047d9e1a3c4441922544763350f"} Dec 05 12:44:49 crc kubenswrapper[4784]: I1205 12:44:49.131661 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f34e93a8-02d9-44ef-a18e-13ce24c3f9a6-etc-swift\") pod \"swift-storage-0\" (UID: \"f34e93a8-02d9-44ef-a18e-13ce24c3f9a6\") " pod="openstack/swift-storage-0" Dec 05 12:44:49 crc kubenswrapper[4784]: E1205 12:44:49.132828 4784 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 12:44:49 crc kubenswrapper[4784]: E1205 12:44:49.132846 4784 projected.go:194] 
Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 12:44:49 crc kubenswrapper[4784]: E1205 12:44:49.132915 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/f34e93a8-02d9-44ef-a18e-13ce24c3f9a6-etc-swift podName:f34e93a8-02d9-44ef-a18e-13ce24c3f9a6 nodeName:}" failed. No retries permitted until 2025-12-05 12:45:21.132897426 +0000 UTC m=+1200.552964321 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/f34e93a8-02d9-44ef-a18e-13ce24c3f9a6-etc-swift") pod "swift-storage-0" (UID: "f34e93a8-02d9-44ef-a18e-13ce24c3f9a6") : configmap "swift-ring-files" not found Dec 05 12:44:49 crc kubenswrapper[4784]: I1205 12:44:49.280281 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ljc2l-config-hrbzh" Dec 05 12:44:49 crc kubenswrapper[4784]: I1205 12:44:49.334911 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d9131e56-7c3e-4430-a114-44d88a6db064-var-log-ovn\") pod \"d9131e56-7c3e-4430-a114-44d88a6db064\" (UID: \"d9131e56-7c3e-4430-a114-44d88a6db064\") " Dec 05 12:44:49 crc kubenswrapper[4784]: I1205 12:44:49.335037 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d9131e56-7c3e-4430-a114-44d88a6db064-scripts\") pod \"d9131e56-7c3e-4430-a114-44d88a6db064\" (UID: \"d9131e56-7c3e-4430-a114-44d88a6db064\") " Dec 05 12:44:49 crc kubenswrapper[4784]: I1205 12:44:49.335061 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d9131e56-7c3e-4430-a114-44d88a6db064-var-run-ovn\") pod \"d9131e56-7c3e-4430-a114-44d88a6db064\" (UID: \"d9131e56-7c3e-4430-a114-44d88a6db064\") " Dec 05 12:44:49 crc kubenswrapper[4784]: I1205 12:44:49.335061 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d9131e56-7c3e-4430-a114-44d88a6db064-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "d9131e56-7c3e-4430-a114-44d88a6db064" (UID: "d9131e56-7c3e-4430-a114-44d88a6db064"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:44:49 crc kubenswrapper[4784]: I1205 12:44:49.335119 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-58kjx\" (UniqueName: \"kubernetes.io/projected/d9131e56-7c3e-4430-a114-44d88a6db064-kube-api-access-58kjx\") pod \"d9131e56-7c3e-4430-a114-44d88a6db064\" (UID: \"d9131e56-7c3e-4430-a114-44d88a6db064\") " Dec 05 12:44:49 crc kubenswrapper[4784]: I1205 12:44:49.335163 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d9131e56-7c3e-4430-a114-44d88a6db064-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "d9131e56-7c3e-4430-a114-44d88a6db064" (UID: "d9131e56-7c3e-4430-a114-44d88a6db064"). InnerVolumeSpecName "var-run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:44:49 crc kubenswrapper[4784]: I1205 12:44:49.335219 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d9131e56-7c3e-4430-a114-44d88a6db064-var-run\") pod \"d9131e56-7c3e-4430-a114-44d88a6db064\" (UID: \"d9131e56-7c3e-4430-a114-44d88a6db064\") " Dec 05 12:44:49 crc kubenswrapper[4784]: I1205 12:44:49.335266 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/d9131e56-7c3e-4430-a114-44d88a6db064-additional-scripts\") pod \"d9131e56-7c3e-4430-a114-44d88a6db064\" (UID: \"d9131e56-7c3e-4430-a114-44d88a6db064\") " Dec 05 12:44:49 crc kubenswrapper[4784]: I1205 12:44:49.335252 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d9131e56-7c3e-4430-a114-44d88a6db064-var-run" (OuterVolumeSpecName: "var-run") pod "d9131e56-7c3e-4430-a114-44d88a6db064" (UID: "d9131e56-7c3e-4430-a114-44d88a6db064"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:44:49 crc kubenswrapper[4784]: I1205 12:44:49.335661 4784 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d9131e56-7c3e-4430-a114-44d88a6db064-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 12:44:49 crc kubenswrapper[4784]: I1205 12:44:49.335676 4784 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d9131e56-7c3e-4430-a114-44d88a6db064-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 12:44:49 crc kubenswrapper[4784]: I1205 12:44:49.335686 4784 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d9131e56-7c3e-4430-a114-44d88a6db064-var-run\") on node \"crc\" DevicePath \"\"" Dec 05 12:44:49 crc kubenswrapper[4784]: I1205 12:44:49.336094 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d9131e56-7c3e-4430-a114-44d88a6db064-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "d9131e56-7c3e-4430-a114-44d88a6db064" (UID: "d9131e56-7c3e-4430-a114-44d88a6db064"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:44:49 crc kubenswrapper[4784]: I1205 12:44:49.336399 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d9131e56-7c3e-4430-a114-44d88a6db064-scripts" (OuterVolumeSpecName: "scripts") pod "d9131e56-7c3e-4430-a114-44d88a6db064" (UID: "d9131e56-7c3e-4430-a114-44d88a6db064"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:44:49 crc kubenswrapper[4784]: I1205 12:44:49.343373 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9131e56-7c3e-4430-a114-44d88a6db064-kube-api-access-58kjx" (OuterVolumeSpecName: "kube-api-access-58kjx") pod "d9131e56-7c3e-4430-a114-44d88a6db064" (UID: "d9131e56-7c3e-4430-a114-44d88a6db064"). InnerVolumeSpecName "kube-api-access-58kjx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:44:49 crc kubenswrapper[4784]: I1205 12:44:49.437179 4784 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d9131e56-7c3e-4430-a114-44d88a6db064-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:44:49 crc kubenswrapper[4784]: I1205 12:44:49.437224 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-58kjx\" (UniqueName: \"kubernetes.io/projected/d9131e56-7c3e-4430-a114-44d88a6db064-kube-api-access-58kjx\") on node \"crc\" DevicePath \"\"" Dec 05 12:44:49 crc kubenswrapper[4784]: I1205 12:44:49.437238 4784 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/d9131e56-7c3e-4430-a114-44d88a6db064-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:44:49 crc kubenswrapper[4784]: I1205 12:44:49.875376 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ljc2l-config-hrbzh" event={"ID":"d9131e56-7c3e-4430-a114-44d88a6db064","Type":"ContainerDied","Data":"16d3de7e854bd99cdfcb6628addf653bd96a94bfbb70088c367abfbcb0112c15"} Dec 05 12:44:49 crc kubenswrapper[4784]: I1205 12:44:49.875439 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="16d3de7e854bd99cdfcb6628addf653bd96a94bfbb70088c367abfbcb0112c15" Dec 05 12:44:49 crc kubenswrapper[4784]: I1205 12:44:49.875472 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ljc2l-config-hrbzh" Dec 05 12:44:49 crc kubenswrapper[4784]: I1205 12:44:49.942674 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ljc2l-config-hrbzh"] Dec 05 12:44:49 crc kubenswrapper[4784]: I1205 12:44:49.949353 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-ljc2l-config-hrbzh"] Dec 05 12:44:50 crc kubenswrapper[4784]: I1205 12:44:50.204711 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:44:50 crc kubenswrapper[4784]: I1205 12:44:50.884493 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-5js9g" event={"ID":"de71f05a-e844-4d80-bd5b-2e4169a624c4","Type":"ContainerStarted","Data":"a4287e6a37925ced6e13577bf680aa7eeacb743f4dd03145d6ce64094de22fed"} Dec 05 12:44:50 crc kubenswrapper[4784]: I1205 12:44:50.922882 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-5js9g" podStartSLOduration=2.139290587 podStartE2EDuration="29.922860361s" podCreationTimestamp="2025-12-05 12:44:21 +0000 UTC" firstStartedPulling="2025-12-05 12:44:22.295507484 +0000 UTC m=+1141.715574339" lastFinishedPulling="2025-12-05 12:44:50.079077298 +0000 UTC m=+1169.499144113" observedRunningTime="2025-12-05 12:44:50.915855281 +0000 UTC m=+1170.335922116" watchObservedRunningTime="2025-12-05 12:44:50.922860361 +0000 UTC m=+1170.342927176" Dec 05 12:44:51 crc kubenswrapper[4784]: I1205 12:44:51.008458 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9131e56-7c3e-4430-a114-44d88a6db064" path="/var/lib/kubelet/pods/d9131e56-7c3e-4430-a114-44d88a6db064/volumes" Dec 05 12:44:57 crc kubenswrapper[4784]: I1205 12:44:57.944752 4784 generic.go:334] "Generic (PLEG): container finished" podID="de71f05a-e844-4d80-bd5b-2e4169a624c4" containerID="a4287e6a37925ced6e13577bf680aa7eeacb743f4dd03145d6ce64094de22fed" exitCode=0 
Dec 05 12:44:57 crc kubenswrapper[4784]: I1205 12:44:57.944909 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-5js9g" event={"ID":"de71f05a-e844-4d80-bd5b-2e4169a624c4","Type":"ContainerDied","Data":"a4287e6a37925ced6e13577bf680aa7eeacb743f4dd03145d6ce64094de22fed"} Dec 05 12:44:59 crc kubenswrapper[4784]: I1205 12:44:59.278858 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-5js9g" Dec 05 12:44:59 crc kubenswrapper[4784]: I1205 12:44:59.319588 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/de71f05a-e844-4d80-bd5b-2e4169a624c4-swiftconf\") pod \"de71f05a-e844-4d80-bd5b-2e4169a624c4\" (UID: \"de71f05a-e844-4d80-bd5b-2e4169a624c4\") " Dec 05 12:44:59 crc kubenswrapper[4784]: I1205 12:44:59.319662 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/de71f05a-e844-4d80-bd5b-2e4169a624c4-scripts\") pod \"de71f05a-e844-4d80-bd5b-2e4169a624c4\" (UID: \"de71f05a-e844-4d80-bd5b-2e4169a624c4\") " Dec 05 12:44:59 crc kubenswrapper[4784]: I1205 12:44:59.319689 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/de71f05a-e844-4d80-bd5b-2e4169a624c4-dispersionconf\") pod \"de71f05a-e844-4d80-bd5b-2e4169a624c4\" (UID: \"de71f05a-e844-4d80-bd5b-2e4169a624c4\") " Dec 05 12:44:59 crc kubenswrapper[4784]: I1205 12:44:59.319854 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de71f05a-e844-4d80-bd5b-2e4169a624c4-combined-ca-bundle\") pod \"de71f05a-e844-4d80-bd5b-2e4169a624c4\" (UID: \"de71f05a-e844-4d80-bd5b-2e4169a624c4\") " Dec 05 12:44:59 crc kubenswrapper[4784]: I1205 12:44:59.319897 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/de71f05a-e844-4d80-bd5b-2e4169a624c4-etc-swift\") pod \"de71f05a-e844-4d80-bd5b-2e4169a624c4\" (UID: \"de71f05a-e844-4d80-bd5b-2e4169a624c4\") " Dec 05 12:44:59 crc kubenswrapper[4784]: I1205 12:44:59.319964 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/de71f05a-e844-4d80-bd5b-2e4169a624c4-ring-data-devices\") pod \"de71f05a-e844-4d80-bd5b-2e4169a624c4\" (UID: \"de71f05a-e844-4d80-bd5b-2e4169a624c4\") " Dec 05 12:44:59 crc kubenswrapper[4784]: I1205 12:44:59.320015 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c4l76\" (UniqueName: \"kubernetes.io/projected/de71f05a-e844-4d80-bd5b-2e4169a624c4-kube-api-access-c4l76\") pod \"de71f05a-e844-4d80-bd5b-2e4169a624c4\" (UID: \"de71f05a-e844-4d80-bd5b-2e4169a624c4\") " Dec 05 12:44:59 crc kubenswrapper[4784]: I1205 12:44:59.342603 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de71f05a-e844-4d80-bd5b-2e4169a624c4-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "de71f05a-e844-4d80-bd5b-2e4169a624c4" (UID: "de71f05a-e844-4d80-bd5b-2e4169a624c4"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:44:59 crc kubenswrapper[4784]: I1205 12:44:59.342917 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de71f05a-e844-4d80-bd5b-2e4169a624c4-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "de71f05a-e844-4d80-bd5b-2e4169a624c4" (UID: "de71f05a-e844-4d80-bd5b-2e4169a624c4"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:44:59 crc kubenswrapper[4784]: I1205 12:44:59.353524 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de71f05a-e844-4d80-bd5b-2e4169a624c4-kube-api-access-c4l76" (OuterVolumeSpecName: "kube-api-access-c4l76") pod "de71f05a-e844-4d80-bd5b-2e4169a624c4" (UID: "de71f05a-e844-4d80-bd5b-2e4169a624c4"). InnerVolumeSpecName "kube-api-access-c4l76". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:44:59 crc kubenswrapper[4784]: I1205 12:44:59.353764 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de71f05a-e844-4d80-bd5b-2e4169a624c4-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "de71f05a-e844-4d80-bd5b-2e4169a624c4" (UID: "de71f05a-e844-4d80-bd5b-2e4169a624c4"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:44:59 crc kubenswrapper[4784]: I1205 12:44:59.375452 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de71f05a-e844-4d80-bd5b-2e4169a624c4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "de71f05a-e844-4d80-bd5b-2e4169a624c4" (UID: "de71f05a-e844-4d80-bd5b-2e4169a624c4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:44:59 crc kubenswrapper[4784]: I1205 12:44:59.376157 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de71f05a-e844-4d80-bd5b-2e4169a624c4-scripts" (OuterVolumeSpecName: "scripts") pod "de71f05a-e844-4d80-bd5b-2e4169a624c4" (UID: "de71f05a-e844-4d80-bd5b-2e4169a624c4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:44:59 crc kubenswrapper[4784]: I1205 12:44:59.382496 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de71f05a-e844-4d80-bd5b-2e4169a624c4-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "de71f05a-e844-4d80-bd5b-2e4169a624c4" (UID: "de71f05a-e844-4d80-bd5b-2e4169a624c4"). InnerVolumeSpecName "swiftconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:44:59 crc kubenswrapper[4784]: I1205 12:44:59.421885 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de71f05a-e844-4d80-bd5b-2e4169a624c4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:44:59 crc kubenswrapper[4784]: I1205 12:44:59.421912 4784 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/de71f05a-e844-4d80-bd5b-2e4169a624c4-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 05 12:44:59 crc kubenswrapper[4784]: I1205 12:44:59.421921 4784 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/de71f05a-e844-4d80-bd5b-2e4169a624c4-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 05 12:44:59 crc kubenswrapper[4784]: I1205 12:44:59.421929 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c4l76\" (UniqueName: \"kubernetes.io/projected/de71f05a-e844-4d80-bd5b-2e4169a624c4-kube-api-access-c4l76\") on node \"crc\" DevicePath \"\"" Dec 05 12:44:59 crc kubenswrapper[4784]: I1205 12:44:59.421939 4784 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/de71f05a-e844-4d80-bd5b-2e4169a624c4-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 05 12:44:59 crc kubenswrapper[4784]: I1205 12:44:59.421949 4784 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/de71f05a-e844-4d80-bd5b-2e4169a624c4-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:44:59 crc kubenswrapper[4784]: I1205 12:44:59.421956 4784 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/de71f05a-e844-4d80-bd5b-2e4169a624c4-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 05 12:44:59 crc kubenswrapper[4784]: I1205 12:44:59.572587 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:44:59 crc kubenswrapper[4784]: I1205 12:44:59.572653 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:44:59 crc kubenswrapper[4784]: I1205 12:44:59.572691 4784 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" Dec 05 12:44:59 crc kubenswrapper[4784]: I1205 12:44:59.573316 4784 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a15484205cc287f77cc28ed8494e1ca51b919f7c735fb5329f3dd3fb14f9fd3b"} pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 12:44:59 crc kubenswrapper[4784]: I1205 12:44:59.573374 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" 
containerName="machine-config-daemon" containerID="cri-o://a15484205cc287f77cc28ed8494e1ca51b919f7c735fb5329f3dd3fb14f9fd3b" gracePeriod=600 Dec 05 12:44:59 crc kubenswrapper[4784]: I1205 12:44:59.970957 4784 generic.go:334] "Generic (PLEG): container finished" podID="be412f31-7a36-4811-8914-be8cdc987d08" containerID="a15484205cc287f77cc28ed8494e1ca51b919f7c735fb5329f3dd3fb14f9fd3b" exitCode=0 Dec 05 12:44:59 crc kubenswrapper[4784]: I1205 12:44:59.971048 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerDied","Data":"a15484205cc287f77cc28ed8494e1ca51b919f7c735fb5329f3dd3fb14f9fd3b"} Dec 05 12:44:59 crc kubenswrapper[4784]: I1205 12:44:59.971287 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerStarted","Data":"babf0042920beaff6a1a6221d95064bf622413e3938841d914cae1798c6b7709"} Dec 05 12:44:59 crc kubenswrapper[4784]: I1205 12:44:59.971310 4784 scope.go:117] "RemoveContainer" containerID="11e4eb9d74cf9c3812a84422c53f6f066a9bf23a80067c974429c0fabba67997" Dec 05 12:44:59 crc kubenswrapper[4784]: I1205 12:44:59.973513 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-5js9g" event={"ID":"de71f05a-e844-4d80-bd5b-2e4169a624c4","Type":"ContainerDied","Data":"192f3e3abe459825b751e5b3d896e975e94c48a69f3c411354dbd032d06a562d"} Dec 05 12:44:59 crc kubenswrapper[4784]: I1205 12:44:59.973539 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="192f3e3abe459825b751e5b3d896e975e94c48a69f3c411354dbd032d06a562d" Dec 05 12:44:59 crc kubenswrapper[4784]: I1205 12:44:59.973596 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-5js9g" Dec 05 12:44:59 crc kubenswrapper[4784]: I1205 12:44:59.978127 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"80b02221-f73a-4535-b2d9-c203e5de2061","Type":"ContainerStarted","Data":"c72796d7f99eecf954fb94d594965c360f5801b5cb51e174051abbb7e1bc7b1d"} Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.043060 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=30.846746997 podStartE2EDuration="1m35.043033384s" podCreationTimestamp="2025-12-05 12:43:25 +0000 UTC" firstStartedPulling="2025-12-05 12:43:54.819995737 +0000 UTC m=+1114.240062552" lastFinishedPulling="2025-12-05 12:44:59.016282114 +0000 UTC m=+1178.436348939" observedRunningTime="2025-12-05 12:45:00.036519851 +0000 UTC m=+1179.456586666" watchObservedRunningTime="2025-12-05 12:45:00.043033384 +0000 UTC m=+1179.463100199" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.158904 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415645-2s9dd"] Dec 05 12:45:00 crc kubenswrapper[4784]: E1205 12:45:00.159585 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de71f05a-e844-4d80-bd5b-2e4169a624c4" containerName="swift-ring-rebalance" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.159614 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="de71f05a-e844-4d80-bd5b-2e4169a624c4" containerName="swift-ring-rebalance" Dec 05 12:45:00 crc kubenswrapper[4784]: E1205 12:45:00.159642 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9131e56-7c3e-4430-a114-44d88a6db064" containerName="ovn-config" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.159652 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9131e56-7c3e-4430-a114-44d88a6db064" containerName="ovn-config" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.159876 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9131e56-7c3e-4430-a114-44d88a6db064" containerName="ovn-config" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.159902 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="de71f05a-e844-4d80-bd5b-2e4169a624c4" containerName="swift-ring-rebalance" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.160684 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415645-2s9dd" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.166915 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.167174 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.192029 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415645-2s9dd"] Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.202993 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.205135 4784 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="e9889e9e-8ec4-44aa-a829-327920ab827f" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.105:5671: connect: connection refused" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.245817 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/13e0a0c7-4992-40a3-8835-7163421c900d-config-volume\") pod \"collect-profiles-29415645-2s9dd\" (UID: \"13e0a0c7-4992-40a3-8835-7163421c900d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415645-2s9dd" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.245909 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/13e0a0c7-4992-40a3-8835-7163421c900d-secret-volume\") pod \"collect-profiles-29415645-2s9dd\" (UID: \"13e0a0c7-4992-40a3-8835-7163421c900d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415645-2s9dd" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.246062 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hgw88\" (UniqueName: \"kubernetes.io/projected/13e0a0c7-4992-40a3-8835-7163421c900d-kube-api-access-hgw88\") pod \"collect-profiles-29415645-2s9dd\" (UID: \"13e0a0c7-4992-40a3-8835-7163421c900d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415645-2s9dd" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.347580 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/13e0a0c7-4992-40a3-8835-7163421c900d-secret-volume\") pod \"collect-profiles-29415645-2s9dd\" (UID: \"13e0a0c7-4992-40a3-8835-7163421c900d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415645-2s9dd" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.347683 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hgw88\" (UniqueName: \"kubernetes.io/projected/13e0a0c7-4992-40a3-8835-7163421c900d-kube-api-access-hgw88\") pod \"collect-profiles-29415645-2s9dd\" (UID: \"13e0a0c7-4992-40a3-8835-7163421c900d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415645-2s9dd" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.347748 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: 
\"kubernetes.io/configmap/13e0a0c7-4992-40a3-8835-7163421c900d-config-volume\") pod \"collect-profiles-29415645-2s9dd\" (UID: \"13e0a0c7-4992-40a3-8835-7163421c900d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415645-2s9dd" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.352695 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/13e0a0c7-4992-40a3-8835-7163421c900d-config-volume\") pod \"collect-profiles-29415645-2s9dd\" (UID: \"13e0a0c7-4992-40a3-8835-7163421c900d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415645-2s9dd" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.370871 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/13e0a0c7-4992-40a3-8835-7163421c900d-secret-volume\") pod \"collect-profiles-29415645-2s9dd\" (UID: \"13e0a0c7-4992-40a3-8835-7163421c900d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415645-2s9dd" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.371468 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hgw88\" (UniqueName: \"kubernetes.io/projected/13e0a0c7-4992-40a3-8835-7163421c900d-kube-api-access-hgw88\") pod \"collect-profiles-29415645-2s9dd\" (UID: \"13e0a0c7-4992-40a3-8835-7163421c900d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415645-2s9dd" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.496355 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-notifications-server-0" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.514012 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415645-2s9dd" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.610371 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-dnwsj"] Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.611437 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-dnwsj" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.643557 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-dnwsj"] Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.654620 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4l54v\" (UniqueName: \"kubernetes.io/projected/df5a977b-4e06-442e-90cc-ccb94cf0929b-kube-api-access-4l54v\") pod \"barbican-db-create-dnwsj\" (UID: \"df5a977b-4e06-442e-90cc-ccb94cf0929b\") " pod="openstack/barbican-db-create-dnwsj" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.654687 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/df5a977b-4e06-442e-90cc-ccb94cf0929b-operator-scripts\") pod \"barbican-db-create-dnwsj\" (UID: \"df5a977b-4e06-442e-90cc-ccb94cf0929b\") " pod="openstack/barbican-db-create-dnwsj" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.685668 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-84546"] Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.686764 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-84546" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.751474 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-7f4f-account-create-update-7mrkg"] Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.758506 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-7f4f-account-create-update-7mrkg" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.760125 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfhdp\" (UniqueName: \"kubernetes.io/projected/7fb37670-5391-491a-9141-a113fdbd8650-kube-api-access-lfhdp\") pod \"cinder-db-create-84546\" (UID: \"7fb37670-5391-491a-9141-a113fdbd8650\") " pod="openstack/cinder-db-create-84546" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.760219 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7fb37670-5391-491a-9141-a113fdbd8650-operator-scripts\") pod \"cinder-db-create-84546\" (UID: \"7fb37670-5391-491a-9141-a113fdbd8650\") " pod="openstack/cinder-db-create-84546" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.760251 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4l54v\" (UniqueName: \"kubernetes.io/projected/df5a977b-4e06-442e-90cc-ccb94cf0929b-kube-api-access-4l54v\") pod \"barbican-db-create-dnwsj\" (UID: \"df5a977b-4e06-442e-90cc-ccb94cf0929b\") " pod="openstack/barbican-db-create-dnwsj" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.760276 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/df5a977b-4e06-442e-90cc-ccb94cf0929b-operator-scripts\") pod \"barbican-db-create-dnwsj\" (UID: \"df5a977b-4e06-442e-90cc-ccb94cf0929b\") " pod="openstack/barbican-db-create-dnwsj" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.760954 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/df5a977b-4e06-442e-90cc-ccb94cf0929b-operator-scripts\") pod \"barbican-db-create-dnwsj\" (UID: \"df5a977b-4e06-442e-90cc-ccb94cf0929b\") " pod="openstack/barbican-db-create-dnwsj" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.763675 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.769230 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-84546"] Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.777137 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-7f4f-account-create-update-7mrkg"] Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.793595 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4l54v\" (UniqueName: \"kubernetes.io/projected/df5a977b-4e06-442e-90cc-ccb94cf0929b-kube-api-access-4l54v\") pod \"barbican-db-create-dnwsj\" (UID: \"df5a977b-4e06-442e-90cc-ccb94cf0929b\") " pod="openstack/barbican-db-create-dnwsj" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.862263 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfhdp\" (UniqueName: 
\"kubernetes.io/projected/7fb37670-5391-491a-9141-a113fdbd8650-kube-api-access-lfhdp\") pod \"cinder-db-create-84546\" (UID: \"7fb37670-5391-491a-9141-a113fdbd8650\") " pod="openstack/cinder-db-create-84546" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.862339 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fc12a9ba-c259-43c6-ac41-c3c0bc2ac019-operator-scripts\") pod \"barbican-7f4f-account-create-update-7mrkg\" (UID: \"fc12a9ba-c259-43c6-ac41-c3c0bc2ac019\") " pod="openstack/barbican-7f4f-account-create-update-7mrkg" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.862399 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7fb37670-5391-491a-9141-a113fdbd8650-operator-scripts\") pod \"cinder-db-create-84546\" (UID: \"7fb37670-5391-491a-9141-a113fdbd8650\") " pod="openstack/cinder-db-create-84546" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.862464 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gjxrf\" (UniqueName: \"kubernetes.io/projected/fc12a9ba-c259-43c6-ac41-c3c0bc2ac019-kube-api-access-gjxrf\") pod \"barbican-7f4f-account-create-update-7mrkg\" (UID: \"fc12a9ba-c259-43c6-ac41-c3c0bc2ac019\") " pod="openstack/barbican-7f4f-account-create-update-7mrkg" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.863356 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7fb37670-5391-491a-9141-a113fdbd8650-operator-scripts\") pod \"cinder-db-create-84546\" (UID: \"7fb37670-5391-491a-9141-a113fdbd8650\") " pod="openstack/cinder-db-create-84546" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.886550 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfhdp\" (UniqueName: \"kubernetes.io/projected/7fb37670-5391-491a-9141-a113fdbd8650-kube-api-access-lfhdp\") pod \"cinder-db-create-84546\" (UID: \"7fb37670-5391-491a-9141-a113fdbd8650\") " pod="openstack/cinder-db-create-84546" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.892617 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-657f-account-create-update-w28tq"] Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.893648 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-657f-account-create-update-w28tq" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.895639 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.920851 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-657f-account-create-update-w28tq"] Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.943358 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-dnwsj" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.961828 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-gkqx2"] Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.967864 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-gkqx2" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.973988 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.974393 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-lkxwc" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.974625 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.974807 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.977004 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-gkqx2"] Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.983280 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fc12a9ba-c259-43c6-ac41-c3c0bc2ac019-operator-scripts\") pod \"barbican-7f4f-account-create-update-7mrkg\" (UID: \"fc12a9ba-c259-43c6-ac41-c3c0bc2ac019\") " pod="openstack/barbican-7f4f-account-create-update-7mrkg" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.983465 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mt7zc\" (UniqueName: \"kubernetes.io/projected/1985ab81-9425-43c9-9395-2efcdd2b336d-kube-api-access-mt7zc\") pod \"cinder-657f-account-create-update-w28tq\" (UID: \"1985ab81-9425-43c9-9395-2efcdd2b336d\") " pod="openstack/cinder-657f-account-create-update-w28tq" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.983592 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gjxrf\" (UniqueName: \"kubernetes.io/projected/fc12a9ba-c259-43c6-ac41-c3c0bc2ac019-kube-api-access-gjxrf\") pod \"barbican-7f4f-account-create-update-7mrkg\" (UID: \"fc12a9ba-c259-43c6-ac41-c3c0bc2ac019\") " pod="openstack/barbican-7f4f-account-create-update-7mrkg" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.983626 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1985ab81-9425-43c9-9395-2efcdd2b336d-operator-scripts\") pod \"cinder-657f-account-create-update-w28tq\" (UID: \"1985ab81-9425-43c9-9395-2efcdd2b336d\") " pod="openstack/cinder-657f-account-create-update-w28tq" Dec 05 12:45:00 crc kubenswrapper[4784]: I1205 12:45:00.984431 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fc12a9ba-c259-43c6-ac41-c3c0bc2ac019-operator-scripts\") pod \"barbican-7f4f-account-create-update-7mrkg\" (UID: \"fc12a9ba-c259-43c6-ac41-c3c0bc2ac019\") " pod="openstack/barbican-7f4f-account-create-update-7mrkg" Dec 05 12:45:01 crc kubenswrapper[4784]: I1205 12:45:01.009218 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-84546" Dec 05 12:45:01 crc kubenswrapper[4784]: I1205 12:45:01.016956 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gjxrf\" (UniqueName: \"kubernetes.io/projected/fc12a9ba-c259-43c6-ac41-c3c0bc2ac019-kube-api-access-gjxrf\") pod \"barbican-7f4f-account-create-update-7mrkg\" (UID: \"fc12a9ba-c259-43c6-ac41-c3c0bc2ac019\") " pod="openstack/barbican-7f4f-account-create-update-7mrkg" Dec 05 12:45:01 crc kubenswrapper[4784]: I1205 12:45:01.086157 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mt7zc\" (UniqueName: \"kubernetes.io/projected/1985ab81-9425-43c9-9395-2efcdd2b336d-kube-api-access-mt7zc\") pod \"cinder-657f-account-create-update-w28tq\" (UID: \"1985ab81-9425-43c9-9395-2efcdd2b336d\") " pod="openstack/cinder-657f-account-create-update-w28tq" Dec 05 12:45:01 crc kubenswrapper[4784]: I1205 12:45:01.086289 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1985ab81-9425-43c9-9395-2efcdd2b336d-operator-scripts\") pod \"cinder-657f-account-create-update-w28tq\" (UID: \"1985ab81-9425-43c9-9395-2efcdd2b336d\") " pod="openstack/cinder-657f-account-create-update-w28tq" Dec 05 12:45:01 crc kubenswrapper[4784]: I1205 12:45:01.086331 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffc17758-4586-495d-842c-68c47898b1c1-config-data\") pod \"keystone-db-sync-gkqx2\" (UID: \"ffc17758-4586-495d-842c-68c47898b1c1\") " pod="openstack/keystone-db-sync-gkqx2" Dec 05 12:45:01 crc kubenswrapper[4784]: I1205 12:45:01.086368 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hm22m\" (UniqueName: \"kubernetes.io/projected/ffc17758-4586-495d-842c-68c47898b1c1-kube-api-access-hm22m\") pod \"keystone-db-sync-gkqx2\" (UID: \"ffc17758-4586-495d-842c-68c47898b1c1\") " pod="openstack/keystone-db-sync-gkqx2" Dec 05 12:45:01 crc kubenswrapper[4784]: I1205 12:45:01.086395 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffc17758-4586-495d-842c-68c47898b1c1-combined-ca-bundle\") pod \"keystone-db-sync-gkqx2\" (UID: \"ffc17758-4586-495d-842c-68c47898b1c1\") " pod="openstack/keystone-db-sync-gkqx2" Dec 05 12:45:01 crc kubenswrapper[4784]: I1205 12:45:01.087890 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1985ab81-9425-43c9-9395-2efcdd2b336d-operator-scripts\") pod \"cinder-657f-account-create-update-w28tq\" (UID: \"1985ab81-9425-43c9-9395-2efcdd2b336d\") " pod="openstack/cinder-657f-account-create-update-w28tq" Dec 05 12:45:01 crc kubenswrapper[4784]: I1205 12:45:01.106713 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-7f4f-account-create-update-7mrkg" Dec 05 12:45:01 crc kubenswrapper[4784]: I1205 12:45:01.112854 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mt7zc\" (UniqueName: \"kubernetes.io/projected/1985ab81-9425-43c9-9395-2efcdd2b336d-kube-api-access-mt7zc\") pod \"cinder-657f-account-create-update-w28tq\" (UID: \"1985ab81-9425-43c9-9395-2efcdd2b336d\") " pod="openstack/cinder-657f-account-create-update-w28tq" Dec 05 12:45:01 crc kubenswrapper[4784]: I1205 12:45:01.144166 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415645-2s9dd"] Dec 05 12:45:01 crc kubenswrapper[4784]: I1205 12:45:01.195388 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffc17758-4586-495d-842c-68c47898b1c1-config-data\") pod \"keystone-db-sync-gkqx2\" (UID: \"ffc17758-4586-495d-842c-68c47898b1c1\") " pod="openstack/keystone-db-sync-gkqx2" Dec 05 12:45:01 crc kubenswrapper[4784]: I1205 12:45:01.195435 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hm22m\" (UniqueName: \"kubernetes.io/projected/ffc17758-4586-495d-842c-68c47898b1c1-kube-api-access-hm22m\") pod \"keystone-db-sync-gkqx2\" (UID: \"ffc17758-4586-495d-842c-68c47898b1c1\") " pod="openstack/keystone-db-sync-gkqx2" Dec 05 12:45:01 crc kubenswrapper[4784]: I1205 12:45:01.195460 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffc17758-4586-495d-842c-68c47898b1c1-combined-ca-bundle\") pod \"keystone-db-sync-gkqx2\" (UID: \"ffc17758-4586-495d-842c-68c47898b1c1\") " pod="openstack/keystone-db-sync-gkqx2" Dec 05 12:45:01 crc kubenswrapper[4784]: I1205 12:45:01.206542 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffc17758-4586-495d-842c-68c47898b1c1-combined-ca-bundle\") pod \"keystone-db-sync-gkqx2\" (UID: \"ffc17758-4586-495d-842c-68c47898b1c1\") " pod="openstack/keystone-db-sync-gkqx2" Dec 05 12:45:01 crc kubenswrapper[4784]: I1205 12:45:01.207847 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffc17758-4586-495d-842c-68c47898b1c1-config-data\") pod \"keystone-db-sync-gkqx2\" (UID: \"ffc17758-4586-495d-842c-68c47898b1c1\") " pod="openstack/keystone-db-sync-gkqx2" Dec 05 12:45:01 crc kubenswrapper[4784]: I1205 12:45:01.213235 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hm22m\" (UniqueName: \"kubernetes.io/projected/ffc17758-4586-495d-842c-68c47898b1c1-kube-api-access-hm22m\") pod \"keystone-db-sync-gkqx2\" (UID: \"ffc17758-4586-495d-842c-68c47898b1c1\") " pod="openstack/keystone-db-sync-gkqx2" Dec 05 12:45:01 crc kubenswrapper[4784]: I1205 12:45:01.213901 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-657f-account-create-update-w28tq" Dec 05 12:45:01 crc kubenswrapper[4784]: I1205 12:45:01.298486 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-gkqx2" Dec 05 12:45:01 crc kubenswrapper[4784]: I1205 12:45:01.597490 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-dnwsj"] Dec 05 12:45:01 crc kubenswrapper[4784]: W1205 12:45:01.602248 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddf5a977b_4e06_442e_90cc_ccb94cf0929b.slice/crio-1be142ff1b7a0527d40e27fbabaee09ec1655577a17f731257fb27fa9e5ac4b4 WatchSource:0}: Error finding container 1be142ff1b7a0527d40e27fbabaee09ec1655577a17f731257fb27fa9e5ac4b4: Status 404 returned error can't find the container with id 1be142ff1b7a0527d40e27fbabaee09ec1655577a17f731257fb27fa9e5ac4b4 Dec 05 12:45:01 crc kubenswrapper[4784]: I1205 12:45:01.684888 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-84546"] Dec 05 12:45:01 crc kubenswrapper[4784]: I1205 12:45:01.838967 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-gkqx2"] Dec 05 12:45:01 crc kubenswrapper[4784]: I1205 12:45:01.847411 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-657f-account-create-update-w28tq"] Dec 05 12:45:01 crc kubenswrapper[4784]: I1205 12:45:01.857735 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-7f4f-account-create-update-7mrkg"] Dec 05 12:45:01 crc kubenswrapper[4784]: W1205 12:45:01.871157 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfc12a9ba_c259_43c6_ac41_c3c0bc2ac019.slice/crio-28cd5fc3cb459f106a4597d353f7c5dbb8339102a430853ad9683976f3521826 WatchSource:0}: Error finding container 28cd5fc3cb459f106a4597d353f7c5dbb8339102a430853ad9683976f3521826: Status 404 returned error can't find the container with id 28cd5fc3cb459f106a4597d353f7c5dbb8339102a430853ad9683976f3521826 Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.033391 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-gkqx2" event={"ID":"ffc17758-4586-495d-842c-68c47898b1c1","Type":"ContainerStarted","Data":"0ad33ac8a5994d1772cf49197fc36582724959c46fb86d61c67608ba878c1f63"} Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.047481 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-7f4f-account-create-update-7mrkg" event={"ID":"fc12a9ba-c259-43c6-ac41-c3c0bc2ac019","Type":"ContainerStarted","Data":"28cd5fc3cb459f106a4597d353f7c5dbb8339102a430853ad9683976f3521826"} Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.050246 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-dnwsj" event={"ID":"df5a977b-4e06-442e-90cc-ccb94cf0929b","Type":"ContainerStarted","Data":"5d722700524a5a2acca8a637f339e3c6cbecc372730f62cb7ef7dc163c7fd4ee"} Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.050291 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-dnwsj" event={"ID":"df5a977b-4e06-442e-90cc-ccb94cf0929b","Type":"ContainerStarted","Data":"1be142ff1b7a0527d40e27fbabaee09ec1655577a17f731257fb27fa9e5ac4b4"} Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.057149 4784 generic.go:334] "Generic (PLEG): container finished" podID="13e0a0c7-4992-40a3-8835-7163421c900d" containerID="7257666f60b1b74ab9e6db2a256cf4aecbfe85562cfc50288182a9f960f76db2" exitCode=0 Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.057237 
4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415645-2s9dd" event={"ID":"13e0a0c7-4992-40a3-8835-7163421c900d","Type":"ContainerDied","Data":"7257666f60b1b74ab9e6db2a256cf4aecbfe85562cfc50288182a9f960f76db2"} Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.057261 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415645-2s9dd" event={"ID":"13e0a0c7-4992-40a3-8835-7163421c900d","Type":"ContainerStarted","Data":"2415e0525263a3b29b496ad9cb4e2a3671c4c672d8b52f0e5b329d704a96841c"} Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.062665 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-84546" event={"ID":"7fb37670-5391-491a-9141-a113fdbd8650","Type":"ContainerStarted","Data":"792a414ae5a7e7d46b0e071f1b1e4927fef66fd095c62783b2c690e04e47fe1c"} Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.062705 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-84546" event={"ID":"7fb37670-5391-491a-9141-a113fdbd8650","Type":"ContainerStarted","Data":"94d2f163a644b8aea4242ecf2ea6233b963f034800076ab5c0c9b50cc534847d"} Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.075078 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-create-dnwsj" podStartSLOduration=2.075060343 podStartE2EDuration="2.075060343s" podCreationTimestamp="2025-12-05 12:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:45:02.069675525 +0000 UTC m=+1181.489742330" watchObservedRunningTime="2025-12-05 12:45:02.075060343 +0000 UTC m=+1181.495127158" Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.080394 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-657f-account-create-update-w28tq" event={"ID":"1985ab81-9425-43c9-9395-2efcdd2b336d","Type":"ContainerStarted","Data":"cb1733c0d06b35767e055f3a07124f1064c0584482e153c809f643ed32198cff"} Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.090006 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-create-84546" podStartSLOduration=2.08998518 podStartE2EDuration="2.08998518s" podCreationTimestamp="2025-12-05 12:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:45:02.084589872 +0000 UTC m=+1181.504656697" watchObservedRunningTime="2025-12-05 12:45:02.08998518 +0000 UTC m=+1181.510051985" Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.135437 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-657f-account-create-update-w28tq" podStartSLOduration=2.135420731 podStartE2EDuration="2.135420731s" podCreationTimestamp="2025-12-05 12:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:45:02.130028612 +0000 UTC m=+1181.550095427" watchObservedRunningTime="2025-12-05 12:45:02.135420731 +0000 UTC m=+1181.555487546" Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.420156 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-db-sync-8rhvb"] Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.421198 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-db-sync-8rhvb" Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.422816 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-watcher-dockercfg-gdqkr" Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.422974 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-config-data" Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.435675 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-db-sync-8rhvb"] Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.486553 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-xmxxf"] Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.487748 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-xmxxf" Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.494612 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-xmxxf"] Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.519863 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6k6l9\" (UniqueName: \"kubernetes.io/projected/44208e80-4774-4f47-93a8-6e23b7402949-kube-api-access-6k6l9\") pod \"watcher-db-sync-8rhvb\" (UID: \"44208e80-4774-4f47-93a8-6e23b7402949\") " pod="openstack/watcher-db-sync-8rhvb" Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.520203 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/44208e80-4774-4f47-93a8-6e23b7402949-db-sync-config-data\") pod \"watcher-db-sync-8rhvb\" (UID: \"44208e80-4774-4f47-93a8-6e23b7402949\") " pod="openstack/watcher-db-sync-8rhvb" Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.520256 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44208e80-4774-4f47-93a8-6e23b7402949-combined-ca-bundle\") pod \"watcher-db-sync-8rhvb\" (UID: \"44208e80-4774-4f47-93a8-6e23b7402949\") " pod="openstack/watcher-db-sync-8rhvb" Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.520293 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44208e80-4774-4f47-93a8-6e23b7402949-config-data\") pod \"watcher-db-sync-8rhvb\" (UID: \"44208e80-4774-4f47-93a8-6e23b7402949\") " pod="openstack/watcher-db-sync-8rhvb" Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.597674 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-5940-account-create-update-9bnkr"] Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.598841 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-5940-account-create-update-9bnkr" Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.604414 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-5940-account-create-update-9bnkr"] Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.611821 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.624329 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/39de58d0-af8e-4869-8f7c-34c94a412b1a-operator-scripts\") pod \"glance-db-create-xmxxf\" (UID: \"39de58d0-af8e-4869-8f7c-34c94a412b1a\") " pod="openstack/glance-db-create-xmxxf" Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.624403 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmwq4\" (UniqueName: \"kubernetes.io/projected/39de58d0-af8e-4869-8f7c-34c94a412b1a-kube-api-access-nmwq4\") pod \"glance-db-create-xmxxf\" (UID: \"39de58d0-af8e-4869-8f7c-34c94a412b1a\") " pod="openstack/glance-db-create-xmxxf" Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.624490 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6k6l9\" (UniqueName: \"kubernetes.io/projected/44208e80-4774-4f47-93a8-6e23b7402949-kube-api-access-6k6l9\") pod \"watcher-db-sync-8rhvb\" (UID: \"44208e80-4774-4f47-93a8-6e23b7402949\") " pod="openstack/watcher-db-sync-8rhvb" Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.624542 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/44208e80-4774-4f47-93a8-6e23b7402949-db-sync-config-data\") pod \"watcher-db-sync-8rhvb\" (UID: \"44208e80-4774-4f47-93a8-6e23b7402949\") " pod="openstack/watcher-db-sync-8rhvb" Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.624591 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44208e80-4774-4f47-93a8-6e23b7402949-combined-ca-bundle\") pod \"watcher-db-sync-8rhvb\" (UID: \"44208e80-4774-4f47-93a8-6e23b7402949\") " pod="openstack/watcher-db-sync-8rhvb" Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.624629 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44208e80-4774-4f47-93a8-6e23b7402949-config-data\") pod \"watcher-db-sync-8rhvb\" (UID: \"44208e80-4774-4f47-93a8-6e23b7402949\") " pod="openstack/watcher-db-sync-8rhvb" Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.631801 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/44208e80-4774-4f47-93a8-6e23b7402949-db-sync-config-data\") pod \"watcher-db-sync-8rhvb\" (UID: \"44208e80-4774-4f47-93a8-6e23b7402949\") " pod="openstack/watcher-db-sync-8rhvb" Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.633168 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44208e80-4774-4f47-93a8-6e23b7402949-config-data\") pod \"watcher-db-sync-8rhvb\" (UID: \"44208e80-4774-4f47-93a8-6e23b7402949\") " pod="openstack/watcher-db-sync-8rhvb" Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.636641 4784 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44208e80-4774-4f47-93a8-6e23b7402949-combined-ca-bundle\") pod \"watcher-db-sync-8rhvb\" (UID: \"44208e80-4774-4f47-93a8-6e23b7402949\") " pod="openstack/watcher-db-sync-8rhvb" Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.651588 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6k6l9\" (UniqueName: \"kubernetes.io/projected/44208e80-4774-4f47-93a8-6e23b7402949-kube-api-access-6k6l9\") pod \"watcher-db-sync-8rhvb\" (UID: \"44208e80-4774-4f47-93a8-6e23b7402949\") " pod="openstack/watcher-db-sync-8rhvb" Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.714317 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-bc4gb"] Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.716106 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-bc4gb" Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.726372 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/39de58d0-af8e-4869-8f7c-34c94a412b1a-operator-scripts\") pod \"glance-db-create-xmxxf\" (UID: \"39de58d0-af8e-4869-8f7c-34c94a412b1a\") " pod="openstack/glance-db-create-xmxxf" Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.726647 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmwq4\" (UniqueName: \"kubernetes.io/projected/39de58d0-af8e-4869-8f7c-34c94a412b1a-kube-api-access-nmwq4\") pod \"glance-db-create-xmxxf\" (UID: \"39de58d0-af8e-4869-8f7c-34c94a412b1a\") " pod="openstack/glance-db-create-xmxxf" Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.726776 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jztvg\" (UniqueName: \"kubernetes.io/projected/9ff23b53-bbeb-467c-9d74-713682fa23a4-kube-api-access-jztvg\") pod \"glance-5940-account-create-update-9bnkr\" (UID: \"9ff23b53-bbeb-467c-9d74-713682fa23a4\") " pod="openstack/glance-5940-account-create-update-9bnkr" Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.726935 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9ff23b53-bbeb-467c-9d74-713682fa23a4-operator-scripts\") pod \"glance-5940-account-create-update-9bnkr\" (UID: \"9ff23b53-bbeb-467c-9d74-713682fa23a4\") " pod="openstack/glance-5940-account-create-update-9bnkr" Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.728948 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/39de58d0-af8e-4869-8f7c-34c94a412b1a-operator-scripts\") pod \"glance-db-create-xmxxf\" (UID: \"39de58d0-af8e-4869-8f7c-34c94a412b1a\") " pod="openstack/glance-db-create-xmxxf" Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.733776 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-bc4gb"] Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.774319 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmwq4\" (UniqueName: \"kubernetes.io/projected/39de58d0-af8e-4869-8f7c-34c94a412b1a-kube-api-access-nmwq4\") pod \"glance-db-create-xmxxf\" (UID: \"39de58d0-af8e-4869-8f7c-34c94a412b1a\") " 
pod="openstack/glance-db-create-xmxxf" Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.797831 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-b15b-account-create-update-9zgxk"] Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.798986 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-b15b-account-create-update-9zgxk" Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.802045 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-sync-8rhvb" Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.802913 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.810361 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-xmxxf" Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.811296 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-b15b-account-create-update-9zgxk"] Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.829141 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tzxlc\" (UniqueName: \"kubernetes.io/projected/22340d7d-4dcb-4d3e-b7b8-9e388587b3aa-kube-api-access-tzxlc\") pod \"neutron-db-create-bc4gb\" (UID: \"22340d7d-4dcb-4d3e-b7b8-9e388587b3aa\") " pod="openstack/neutron-db-create-bc4gb" Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.829409 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jztvg\" (UniqueName: \"kubernetes.io/projected/9ff23b53-bbeb-467c-9d74-713682fa23a4-kube-api-access-jztvg\") pod \"glance-5940-account-create-update-9bnkr\" (UID: \"9ff23b53-bbeb-467c-9d74-713682fa23a4\") " pod="openstack/glance-5940-account-create-update-9bnkr" Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.829471 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/22340d7d-4dcb-4d3e-b7b8-9e388587b3aa-operator-scripts\") pod \"neutron-db-create-bc4gb\" (UID: \"22340d7d-4dcb-4d3e-b7b8-9e388587b3aa\") " pod="openstack/neutron-db-create-bc4gb" Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.829509 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9ff23b53-bbeb-467c-9d74-713682fa23a4-operator-scripts\") pod \"glance-5940-account-create-update-9bnkr\" (UID: \"9ff23b53-bbeb-467c-9d74-713682fa23a4\") " pod="openstack/glance-5940-account-create-update-9bnkr" Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.830351 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9ff23b53-bbeb-467c-9d74-713682fa23a4-operator-scripts\") pod \"glance-5940-account-create-update-9bnkr\" (UID: \"9ff23b53-bbeb-467c-9d74-713682fa23a4\") " pod="openstack/glance-5940-account-create-update-9bnkr" Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.845122 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jztvg\" (UniqueName: \"kubernetes.io/projected/9ff23b53-bbeb-467c-9d74-713682fa23a4-kube-api-access-jztvg\") pod \"glance-5940-account-create-update-9bnkr\" (UID: \"9ff23b53-bbeb-467c-9d74-713682fa23a4\") " 
pod="openstack/glance-5940-account-create-update-9bnkr" Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.901336 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-5940-account-create-update-9bnkr" Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.934787 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wkgmd\" (UniqueName: \"kubernetes.io/projected/c6b3acb2-88da-4560-9648-ee6d3a5d6d60-kube-api-access-wkgmd\") pod \"neutron-b15b-account-create-update-9zgxk\" (UID: \"c6b3acb2-88da-4560-9648-ee6d3a5d6d60\") " pod="openstack/neutron-b15b-account-create-update-9zgxk" Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.946174 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/22340d7d-4dcb-4d3e-b7b8-9e388587b3aa-operator-scripts\") pod \"neutron-db-create-bc4gb\" (UID: \"22340d7d-4dcb-4d3e-b7b8-9e388587b3aa\") " pod="openstack/neutron-db-create-bc4gb" Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.948050 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c6b3acb2-88da-4560-9648-ee6d3a5d6d60-operator-scripts\") pod \"neutron-b15b-account-create-update-9zgxk\" (UID: \"c6b3acb2-88da-4560-9648-ee6d3a5d6d60\") " pod="openstack/neutron-b15b-account-create-update-9zgxk" Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.949038 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tzxlc\" (UniqueName: \"kubernetes.io/projected/22340d7d-4dcb-4d3e-b7b8-9e388587b3aa-kube-api-access-tzxlc\") pod \"neutron-db-create-bc4gb\" (UID: \"22340d7d-4dcb-4d3e-b7b8-9e388587b3aa\") " pod="openstack/neutron-db-create-bc4gb" Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.950536 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/22340d7d-4dcb-4d3e-b7b8-9e388587b3aa-operator-scripts\") pod \"neutron-db-create-bc4gb\" (UID: \"22340d7d-4dcb-4d3e-b7b8-9e388587b3aa\") " pod="openstack/neutron-db-create-bc4gb" Dec 05 12:45:02 crc kubenswrapper[4784]: I1205 12:45:02.972646 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tzxlc\" (UniqueName: \"kubernetes.io/projected/22340d7d-4dcb-4d3e-b7b8-9e388587b3aa-kube-api-access-tzxlc\") pod \"neutron-db-create-bc4gb\" (UID: \"22340d7d-4dcb-4d3e-b7b8-9e388587b3aa\") " pod="openstack/neutron-db-create-bc4gb" Dec 05 12:45:03 crc kubenswrapper[4784]: I1205 12:45:03.052718 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wkgmd\" (UniqueName: \"kubernetes.io/projected/c6b3acb2-88da-4560-9648-ee6d3a5d6d60-kube-api-access-wkgmd\") pod \"neutron-b15b-account-create-update-9zgxk\" (UID: \"c6b3acb2-88da-4560-9648-ee6d3a5d6d60\") " pod="openstack/neutron-b15b-account-create-update-9zgxk" Dec 05 12:45:03 crc kubenswrapper[4784]: I1205 12:45:03.052884 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c6b3acb2-88da-4560-9648-ee6d3a5d6d60-operator-scripts\") pod \"neutron-b15b-account-create-update-9zgxk\" (UID: \"c6b3acb2-88da-4560-9648-ee6d3a5d6d60\") " pod="openstack/neutron-b15b-account-create-update-9zgxk" Dec 05 12:45:03 crc kubenswrapper[4784]: 
I1205 12:45:03.054278 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c6b3acb2-88da-4560-9648-ee6d3a5d6d60-operator-scripts\") pod \"neutron-b15b-account-create-update-9zgxk\" (UID: \"c6b3acb2-88da-4560-9648-ee6d3a5d6d60\") " pod="openstack/neutron-b15b-account-create-update-9zgxk"
Dec 05 12:45:03 crc kubenswrapper[4784]: I1205 12:45:03.079098 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wkgmd\" (UniqueName: \"kubernetes.io/projected/c6b3acb2-88da-4560-9648-ee6d3a5d6d60-kube-api-access-wkgmd\") pod \"neutron-b15b-account-create-update-9zgxk\" (UID: \"c6b3acb2-88da-4560-9648-ee6d3a5d6d60\") " pod="openstack/neutron-b15b-account-create-update-9zgxk"
Dec 05 12:45:03 crc kubenswrapper[4784]: I1205 12:45:03.083071 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-db-sync-8rhvb"]
Dec 05 12:45:03 crc kubenswrapper[4784]: I1205 12:45:03.092526 4784 generic.go:334] "Generic (PLEG): container finished" podID="1985ab81-9425-43c9-9395-2efcdd2b336d" containerID="9252f9844134d6ffdf9ed49ab4709bca71ea8a3a9595b8dc724bf08dd0f48a79" exitCode=0
Dec 05 12:45:03 crc kubenswrapper[4784]: I1205 12:45:03.092566 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-657f-account-create-update-w28tq" event={"ID":"1985ab81-9425-43c9-9395-2efcdd2b336d","Type":"ContainerDied","Data":"9252f9844134d6ffdf9ed49ab4709bca71ea8a3a9595b8dc724bf08dd0f48a79"}
Dec 05 12:45:03 crc kubenswrapper[4784]: I1205 12:45:03.093956 4784 generic.go:334] "Generic (PLEG): container finished" podID="fc12a9ba-c259-43c6-ac41-c3c0bc2ac019" containerID="a4d9a80bb0266730a2fc5857ab6ad8e4a0e60c4c5da7d2a71152769cd188fdd2" exitCode=0
Dec 05 12:45:03 crc kubenswrapper[4784]: I1205 12:45:03.094327 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-7f4f-account-create-update-7mrkg" event={"ID":"fc12a9ba-c259-43c6-ac41-c3c0bc2ac019","Type":"ContainerDied","Data":"a4d9a80bb0266730a2fc5857ab6ad8e4a0e60c4c5da7d2a71152769cd188fdd2"}
Dec 05 12:45:03 crc kubenswrapper[4784]: I1205 12:45:03.096681 4784 generic.go:334] "Generic (PLEG): container finished" podID="df5a977b-4e06-442e-90cc-ccb94cf0929b" containerID="5d722700524a5a2acca8a637f339e3c6cbecc372730f62cb7ef7dc163c7fd4ee" exitCode=0
Dec 05 12:45:03 crc kubenswrapper[4784]: I1205 12:45:03.096713 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-dnwsj" event={"ID":"df5a977b-4e06-442e-90cc-ccb94cf0929b","Type":"ContainerDied","Data":"5d722700524a5a2acca8a637f339e3c6cbecc372730f62cb7ef7dc163c7fd4ee"}
Dec 05 12:45:03 crc kubenswrapper[4784]: I1205 12:45:03.098129 4784 generic.go:334] "Generic (PLEG): container finished" podID="7fb37670-5391-491a-9141-a113fdbd8650" containerID="792a414ae5a7e7d46b0e071f1b1e4927fef66fd095c62783b2c690e04e47fe1c" exitCode=0
Dec 05 12:45:03 crc kubenswrapper[4784]: I1205 12:45:03.098272 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-84546" event={"ID":"7fb37670-5391-491a-9141-a113fdbd8650","Type":"ContainerDied","Data":"792a414ae5a7e7d46b0e071f1b1e4927fef66fd095c62783b2c690e04e47fe1c"}
Dec 05 12:45:03 crc kubenswrapper[4784]: I1205 12:45:03.236066 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-bc4gb"
Dec 05 12:45:03 crc kubenswrapper[4784]: I1205 12:45:03.244098 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-b15b-account-create-update-9zgxk"
Dec 05 12:45:03 crc kubenswrapper[4784]: I1205 12:45:03.345862 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 05 12:45:03 crc kubenswrapper[4784]: I1205 12:45:03.346397 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="80b02221-f73a-4535-b2d9-c203e5de2061" containerName="prometheus" containerID="cri-o://b6ea72f4c0636fa6fcf55d4829102b8db5d000b56c5326bf0e7cb7adb514c10c" gracePeriod=600
Dec 05 12:45:03 crc kubenswrapper[4784]: I1205 12:45:03.346631 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="80b02221-f73a-4535-b2d9-c203e5de2061" containerName="thanos-sidecar" containerID="cri-o://c72796d7f99eecf954fb94d594965c360f5801b5cb51e174051abbb7e1bc7b1d" gracePeriod=600
Dec 05 12:45:03 crc kubenswrapper[4784]: I1205 12:45:03.346677 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="80b02221-f73a-4535-b2d9-c203e5de2061" containerName="config-reloader" containerID="cri-o://91e146b11b7a754be41abfd3596f21157f44dae68d79af3757fc671008669720" gracePeriod=600
Dec 05 12:45:03 crc kubenswrapper[4784]: I1205 12:45:03.501755 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415645-2s9dd"
Dec 05 12:45:03 crc kubenswrapper[4784]: I1205 12:45:03.537788 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-xmxxf"]
Dec 05 12:45:03 crc kubenswrapper[4784]: I1205 12:45:03.655359 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-5940-account-create-update-9bnkr"]
Dec 05 12:45:03 crc kubenswrapper[4784]: I1205 12:45:03.668636 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/13e0a0c7-4992-40a3-8835-7163421c900d-secret-volume\") pod \"13e0a0c7-4992-40a3-8835-7163421c900d\" (UID: \"13e0a0c7-4992-40a3-8835-7163421c900d\") "
Dec 05 12:45:03 crc kubenswrapper[4784]: I1205 12:45:03.670040 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/13e0a0c7-4992-40a3-8835-7163421c900d-config-volume\") pod \"13e0a0c7-4992-40a3-8835-7163421c900d\" (UID: \"13e0a0c7-4992-40a3-8835-7163421c900d\") "
Dec 05 12:45:03 crc kubenswrapper[4784]: I1205 12:45:03.670067 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hgw88\" (UniqueName: \"kubernetes.io/projected/13e0a0c7-4992-40a3-8835-7163421c900d-kube-api-access-hgw88\") pod \"13e0a0c7-4992-40a3-8835-7163421c900d\" (UID: \"13e0a0c7-4992-40a3-8835-7163421c900d\") "
Dec 05 12:45:03 crc kubenswrapper[4784]: I1205 12:45:03.670778 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/13e0a0c7-4992-40a3-8835-7163421c900d-config-volume" (OuterVolumeSpecName: "config-volume") pod "13e0a0c7-4992-40a3-8835-7163421c900d" (UID: "13e0a0c7-4992-40a3-8835-7163421c900d"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:45:03 crc kubenswrapper[4784]: I1205 12:45:03.678280 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13e0a0c7-4992-40a3-8835-7163421c900d-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "13e0a0c7-4992-40a3-8835-7163421c900d" (UID: "13e0a0c7-4992-40a3-8835-7163421c900d"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:45:03 crc kubenswrapper[4784]: I1205 12:45:03.681564 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13e0a0c7-4992-40a3-8835-7163421c900d-kube-api-access-hgw88" (OuterVolumeSpecName: "kube-api-access-hgw88") pod "13e0a0c7-4992-40a3-8835-7163421c900d" (UID: "13e0a0c7-4992-40a3-8835-7163421c900d"). InnerVolumeSpecName "kube-api-access-hgw88". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:45:03 crc kubenswrapper[4784]: I1205 12:45:03.772416 4784 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/13e0a0c7-4992-40a3-8835-7163421c900d-secret-volume\") on node \"crc\" DevicePath \"\""
Dec 05 12:45:03 crc kubenswrapper[4784]: I1205 12:45:03.772449 4784 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/13e0a0c7-4992-40a3-8835-7163421c900d-config-volume\") on node \"crc\" DevicePath \"\""
Dec 05 12:45:03 crc kubenswrapper[4784]: I1205 12:45:03.772459 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hgw88\" (UniqueName: \"kubernetes.io/projected/13e0a0c7-4992-40a3-8835-7163421c900d-kube-api-access-hgw88\") on node \"crc\" DevicePath \"\""
Dec 05 12:45:03 crc kubenswrapper[4784]: I1205 12:45:03.815720 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-bc4gb"]
Dec 05 12:45:03 crc kubenswrapper[4784]: W1205 12:45:03.820064 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod22340d7d_4dcb_4d3e_b7b8_9e388587b3aa.slice/crio-c9de01a757031e0e2c886b9957b2d11a5971494d90d7326b70ce427ec9be1db0 WatchSource:0}: Error finding container c9de01a757031e0e2c886b9957b2d11a5971494d90d7326b70ce427ec9be1db0: Status 404 returned error can't find the container with id c9de01a757031e0e2c886b9957b2d11a5971494d90d7326b70ce427ec9be1db0
Dec 05 12:45:03 crc kubenswrapper[4784]: I1205 12:45:03.900486 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-b15b-account-create-update-9zgxk"]
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.146033 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-5940-account-create-update-9bnkr" event={"ID":"9ff23b53-bbeb-467c-9d74-713682fa23a4","Type":"ContainerStarted","Data":"7f8ceb121e8b5f4a850cd12af9c3e210962322abb391083b3945f3c71f2721b7"}
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.146341 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-5940-account-create-update-9bnkr" event={"ID":"9ff23b53-bbeb-467c-9d74-713682fa23a4","Type":"ContainerStarted","Data":"f430b943196dc941aa8ccdc36a6e1b794e7978c84cb36f2a41b683d22d8be20a"}
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.154552 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415645-2s9dd" event={"ID":"13e0a0c7-4992-40a3-8835-7163421c900d","Type":"ContainerDied","Data":"2415e0525263a3b29b496ad9cb4e2a3671c4c672d8b52f0e5b329d704a96841c"}
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.154583 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2415e0525263a3b29b496ad9cb4e2a3671c4c672d8b52f0e5b329d704a96841c"
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.154659 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415645-2s9dd"
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.165920 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-5940-account-create-update-9bnkr" podStartSLOduration=2.165904702 podStartE2EDuration="2.165904702s" podCreationTimestamp="2025-12-05 12:45:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:45:04.165360295 +0000 UTC m=+1183.585427110" watchObservedRunningTime="2025-12-05 12:45:04.165904702 +0000 UTC m=+1183.585971557"
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.172493 4784 generic.go:334] "Generic (PLEG): container finished" podID="80b02221-f73a-4535-b2d9-c203e5de2061" containerID="c72796d7f99eecf954fb94d594965c360f5801b5cb51e174051abbb7e1bc7b1d" exitCode=0
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.172530 4784 generic.go:334] "Generic (PLEG): container finished" podID="80b02221-f73a-4535-b2d9-c203e5de2061" containerID="91e146b11b7a754be41abfd3596f21157f44dae68d79af3757fc671008669720" exitCode=0
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.172540 4784 generic.go:334] "Generic (PLEG): container finished" podID="80b02221-f73a-4535-b2d9-c203e5de2061" containerID="b6ea72f4c0636fa6fcf55d4829102b8db5d000b56c5326bf0e7cb7adb514c10c" exitCode=0
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.172627 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"80b02221-f73a-4535-b2d9-c203e5de2061","Type":"ContainerDied","Data":"c72796d7f99eecf954fb94d594965c360f5801b5cb51e174051abbb7e1bc7b1d"}
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.172664 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"80b02221-f73a-4535-b2d9-c203e5de2061","Type":"ContainerDied","Data":"91e146b11b7a754be41abfd3596f21157f44dae68d79af3757fc671008669720"}
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.172678 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"80b02221-f73a-4535-b2d9-c203e5de2061","Type":"ContainerDied","Data":"b6ea72f4c0636fa6fcf55d4829102b8db5d000b56c5326bf0e7cb7adb514c10c"}
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.186800 4784 generic.go:334] "Generic (PLEG): container finished" podID="39de58d0-af8e-4869-8f7c-34c94a412b1a" containerID="366ac7830f6ed494c3ba9244c6f9804d3ca8a735a17623f227023de7fa89790c" exitCode=0
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.186895 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-xmxxf" event={"ID":"39de58d0-af8e-4869-8f7c-34c94a412b1a","Type":"ContainerDied","Data":"366ac7830f6ed494c3ba9244c6f9804d3ca8a735a17623f227023de7fa89790c"}
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.186922 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-xmxxf" event={"ID":"39de58d0-af8e-4869-8f7c-34c94a412b1a","Type":"ContainerStarted","Data":"1012171a971829c52adb39030c6a1de632fe63558790f89047f9abd31370c52d"}
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.201028 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-sync-8rhvb" event={"ID":"44208e80-4774-4f47-93a8-6e23b7402949","Type":"ContainerStarted","Data":"b94ba3bd2bb106a3bb79fd836c8a92c961990c3a51a7749e969225a6f09f73dd"}
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.206164 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-bc4gb" event={"ID":"22340d7d-4dcb-4d3e-b7b8-9e388587b3aa","Type":"ContainerStarted","Data":"9ba8edb6af990a5c3afa3fba997d46f949d07f6c3f4b12dd5335de31d74b31ef"}
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.206428 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-bc4gb" event={"ID":"22340d7d-4dcb-4d3e-b7b8-9e388587b3aa","Type":"ContainerStarted","Data":"c9de01a757031e0e2c886b9957b2d11a5971494d90d7326b70ce427ec9be1db0"}
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.216806 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-b15b-account-create-update-9zgxk" event={"ID":"c6b3acb2-88da-4560-9648-ee6d3a5d6d60","Type":"ContainerStarted","Data":"60b3c15bb993555975059ae32e06a6d5526bdaa264e716bdd2713eab3b46a248"}
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.216943 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-b15b-account-create-update-9zgxk" event={"ID":"c6b3acb2-88da-4560-9648-ee6d3a5d6d60","Type":"ContainerStarted","Data":"0d3aa3d9cdbb4d67728cb146306a49a8ed694fe190f6f5d72b5760298cc1edcd"}
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.228563 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-create-bc4gb" podStartSLOduration=2.228546369 podStartE2EDuration="2.228546369s" podCreationTimestamp="2025-12-05 12:45:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:45:04.227351713 +0000 UTC m=+1183.647418528" watchObservedRunningTime="2025-12-05 12:45:04.228546369 +0000 UTC m=+1183.648613184"
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.253944 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-b15b-account-create-update-9zgxk" podStartSLOduration=2.253928883 podStartE2EDuration="2.253928883s" podCreationTimestamp="2025-12-05 12:45:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:45:04.247289356 +0000 UTC m=+1183.667356171" watchObservedRunningTime="2025-12-05 12:45:04.253928883 +0000 UTC m=+1183.673995698"
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.387640 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.486502 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/80b02221-f73a-4535-b2d9-c203e5de2061-web-config\") pod \"80b02221-f73a-4535-b2d9-c203e5de2061\" (UID: \"80b02221-f73a-4535-b2d9-c203e5de2061\") "
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.487646 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-04a1f8eb-a1ee-49a8-9ddb-41c4006574fd\") pod \"80b02221-f73a-4535-b2d9-c203e5de2061\" (UID: \"80b02221-f73a-4535-b2d9-c203e5de2061\") "
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.487727 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/80b02221-f73a-4535-b2d9-c203e5de2061-prometheus-metric-storage-rulefiles-0\") pod \"80b02221-f73a-4535-b2d9-c203e5de2061\" (UID: \"80b02221-f73a-4535-b2d9-c203e5de2061\") "
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.487776 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4c2bf\" (UniqueName: \"kubernetes.io/projected/80b02221-f73a-4535-b2d9-c203e5de2061-kube-api-access-4c2bf\") pod \"80b02221-f73a-4535-b2d9-c203e5de2061\" (UID: \"80b02221-f73a-4535-b2d9-c203e5de2061\") "
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.487859 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/80b02221-f73a-4535-b2d9-c203e5de2061-config\") pod \"80b02221-f73a-4535-b2d9-c203e5de2061\" (UID: \"80b02221-f73a-4535-b2d9-c203e5de2061\") "
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.487934 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/80b02221-f73a-4535-b2d9-c203e5de2061-thanos-prometheus-http-client-file\") pod \"80b02221-f73a-4535-b2d9-c203e5de2061\" (UID: \"80b02221-f73a-4535-b2d9-c203e5de2061\") "
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.487975 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/80b02221-f73a-4535-b2d9-c203e5de2061-config-out\") pod \"80b02221-f73a-4535-b2d9-c203e5de2061\" (UID: \"80b02221-f73a-4535-b2d9-c203e5de2061\") "
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.487998 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/80b02221-f73a-4535-b2d9-c203e5de2061-tls-assets\") pod \"80b02221-f73a-4535-b2d9-c203e5de2061\" (UID: \"80b02221-f73a-4535-b2d9-c203e5de2061\") "
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.493672 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80b02221-f73a-4535-b2d9-c203e5de2061-tls-assets" (OuterVolumeSpecName: "tls-assets") pod "80b02221-f73a-4535-b2d9-c203e5de2061" (UID: "80b02221-f73a-4535-b2d9-c203e5de2061"). InnerVolumeSpecName "tls-assets". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.500667 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80b02221-f73a-4535-b2d9-c203e5de2061-config" (OuterVolumeSpecName: "config") pod "80b02221-f73a-4535-b2d9-c203e5de2061" (UID: "80b02221-f73a-4535-b2d9-c203e5de2061"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.501022 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/80b02221-f73a-4535-b2d9-c203e5de2061-prometheus-metric-storage-rulefiles-0" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-0") pod "80b02221-f73a-4535-b2d9-c203e5de2061" (UID: "80b02221-f73a-4535-b2d9-c203e5de2061"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.501027 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80b02221-f73a-4535-b2d9-c203e5de2061-kube-api-access-4c2bf" (OuterVolumeSpecName: "kube-api-access-4c2bf") pod "80b02221-f73a-4535-b2d9-c203e5de2061" (UID: "80b02221-f73a-4535-b2d9-c203e5de2061"). InnerVolumeSpecName "kube-api-access-4c2bf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.502907 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/80b02221-f73a-4535-b2d9-c203e5de2061-config-out" (OuterVolumeSpecName: "config-out") pod "80b02221-f73a-4535-b2d9-c203e5de2061" (UID: "80b02221-f73a-4535-b2d9-c203e5de2061"). InnerVolumeSpecName "config-out". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.523913 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80b02221-f73a-4535-b2d9-c203e5de2061-thanos-prometheus-http-client-file" (OuterVolumeSpecName: "thanos-prometheus-http-client-file") pod "80b02221-f73a-4535-b2d9-c203e5de2061" (UID: "80b02221-f73a-4535-b2d9-c203e5de2061"). InnerVolumeSpecName "thanos-prometheus-http-client-file". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.531018 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-04a1f8eb-a1ee-49a8-9ddb-41c4006574fd" (OuterVolumeSpecName: "prometheus-metric-storage-db") pod "80b02221-f73a-4535-b2d9-c203e5de2061" (UID: "80b02221-f73a-4535-b2d9-c203e5de2061"). InnerVolumeSpecName "pvc-04a1f8eb-a1ee-49a8-9ddb-41c4006574fd". PluginName "kubernetes.io/csi", VolumeGidValue ""
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.570072 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80b02221-f73a-4535-b2d9-c203e5de2061-web-config" (OuterVolumeSpecName: "web-config") pod "80b02221-f73a-4535-b2d9-c203e5de2061" (UID: "80b02221-f73a-4535-b2d9-c203e5de2061"). InnerVolumeSpecName "web-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.590738 4784 reconciler_common.go:293] "Volume detached for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/80b02221-f73a-4535-b2d9-c203e5de2061-web-config\") on node \"crc\" DevicePath \"\""
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.590794 4784 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-04a1f8eb-a1ee-49a8-9ddb-41c4006574fd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-04a1f8eb-a1ee-49a8-9ddb-41c4006574fd\") on node \"crc\" "
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.590808 4784 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/80b02221-f73a-4535-b2d9-c203e5de2061-prometheus-metric-storage-rulefiles-0\") on node \"crc\" DevicePath \"\""
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.590819 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4c2bf\" (UniqueName: \"kubernetes.io/projected/80b02221-f73a-4535-b2d9-c203e5de2061-kube-api-access-4c2bf\") on node \"crc\" DevicePath \"\""
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.590831 4784 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/80b02221-f73a-4535-b2d9-c203e5de2061-config\") on node \"crc\" DevicePath \"\""
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.590839 4784 reconciler_common.go:293] "Volume detached for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/80b02221-f73a-4535-b2d9-c203e5de2061-thanos-prometheus-http-client-file\") on node \"crc\" DevicePath \"\""
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.590848 4784 reconciler_common.go:293] "Volume detached for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/80b02221-f73a-4535-b2d9-c203e5de2061-config-out\") on node \"crc\" DevicePath \"\""
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.590856 4784 reconciler_common.go:293] "Volume detached for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/80b02221-f73a-4535-b2d9-c203e5de2061-tls-assets\") on node \"crc\" DevicePath \"\""
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.632412 4784 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice...
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.632567 4784 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-04a1f8eb-a1ee-49a8-9ddb-41c4006574fd" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-04a1f8eb-a1ee-49a8-9ddb-41c4006574fd") on node "crc"
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.692978 4784 reconciler_common.go:293] "Volume detached for volume \"pvc-04a1f8eb-a1ee-49a8-9ddb-41c4006574fd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-04a1f8eb-a1ee-49a8-9ddb-41c4006574fd\") on node \"crc\" DevicePath \"\""
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.727821 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-7f4f-account-create-update-7mrkg"
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.777354 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-657f-account-create-update-w28tq"
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.794318 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gjxrf\" (UniqueName: \"kubernetes.io/projected/fc12a9ba-c259-43c6-ac41-c3c0bc2ac019-kube-api-access-gjxrf\") pod \"fc12a9ba-c259-43c6-ac41-c3c0bc2ac019\" (UID: \"fc12a9ba-c259-43c6-ac41-c3c0bc2ac019\") "
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.794631 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fc12a9ba-c259-43c6-ac41-c3c0bc2ac019-operator-scripts\") pod \"fc12a9ba-c259-43c6-ac41-c3c0bc2ac019\" (UID: \"fc12a9ba-c259-43c6-ac41-c3c0bc2ac019\") "
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.796259 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc12a9ba-c259-43c6-ac41-c3c0bc2ac019-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fc12a9ba-c259-43c6-ac41-c3c0bc2ac019" (UID: "fc12a9ba-c259-43c6-ac41-c3c0bc2ac019"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.841700 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc12a9ba-c259-43c6-ac41-c3c0bc2ac019-kube-api-access-gjxrf" (OuterVolumeSpecName: "kube-api-access-gjxrf") pod "fc12a9ba-c259-43c6-ac41-c3c0bc2ac019" (UID: "fc12a9ba-c259-43c6-ac41-c3c0bc2ac019"). InnerVolumeSpecName "kube-api-access-gjxrf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.854060 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-84546"
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.863017 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-dnwsj"
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.896495 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4l54v\" (UniqueName: \"kubernetes.io/projected/df5a977b-4e06-442e-90cc-ccb94cf0929b-kube-api-access-4l54v\") pod \"df5a977b-4e06-442e-90cc-ccb94cf0929b\" (UID: \"df5a977b-4e06-442e-90cc-ccb94cf0929b\") "
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.896583 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/df5a977b-4e06-442e-90cc-ccb94cf0929b-operator-scripts\") pod \"df5a977b-4e06-442e-90cc-ccb94cf0929b\" (UID: \"df5a977b-4e06-442e-90cc-ccb94cf0929b\") "
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.896603 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7fb37670-5391-491a-9141-a113fdbd8650-operator-scripts\") pod \"7fb37670-5391-491a-9141-a113fdbd8650\" (UID: \"7fb37670-5391-491a-9141-a113fdbd8650\") "
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.896692 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lfhdp\" (UniqueName: \"kubernetes.io/projected/7fb37670-5391-491a-9141-a113fdbd8650-kube-api-access-lfhdp\") pod \"7fb37670-5391-491a-9141-a113fdbd8650\" (UID: \"7fb37670-5391-491a-9141-a113fdbd8650\") "
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.896727 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1985ab81-9425-43c9-9395-2efcdd2b336d-operator-scripts\") pod \"1985ab81-9425-43c9-9395-2efcdd2b336d\" (UID: \"1985ab81-9425-43c9-9395-2efcdd2b336d\") "
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.896820 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mt7zc\" (UniqueName: \"kubernetes.io/projected/1985ab81-9425-43c9-9395-2efcdd2b336d-kube-api-access-mt7zc\") pod \"1985ab81-9425-43c9-9395-2efcdd2b336d\" (UID: \"1985ab81-9425-43c9-9395-2efcdd2b336d\") "
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.897552 4784 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fc12a9ba-c259-43c6-ac41-c3c0bc2ac019-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.897574 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gjxrf\" (UniqueName: \"kubernetes.io/projected/fc12a9ba-c259-43c6-ac41-c3c0bc2ac019-kube-api-access-gjxrf\") on node \"crc\" DevicePath \"\""
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.898933 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/df5a977b-4e06-442e-90cc-ccb94cf0929b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "df5a977b-4e06-442e-90cc-ccb94cf0929b" (UID: "df5a977b-4e06-442e-90cc-ccb94cf0929b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.899475 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1985ab81-9425-43c9-9395-2efcdd2b336d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1985ab81-9425-43c9-9395-2efcdd2b336d" (UID: "1985ab81-9425-43c9-9395-2efcdd2b336d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.899528 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7fb37670-5391-491a-9141-a113fdbd8650-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7fb37670-5391-491a-9141-a113fdbd8650" (UID: "7fb37670-5391-491a-9141-a113fdbd8650"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.902459 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1985ab81-9425-43c9-9395-2efcdd2b336d-kube-api-access-mt7zc" (OuterVolumeSpecName: "kube-api-access-mt7zc") pod "1985ab81-9425-43c9-9395-2efcdd2b336d" (UID: "1985ab81-9425-43c9-9395-2efcdd2b336d"). InnerVolumeSpecName "kube-api-access-mt7zc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.904226 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df5a977b-4e06-442e-90cc-ccb94cf0929b-kube-api-access-4l54v" (OuterVolumeSpecName: "kube-api-access-4l54v") pod "df5a977b-4e06-442e-90cc-ccb94cf0929b" (UID: "df5a977b-4e06-442e-90cc-ccb94cf0929b"). InnerVolumeSpecName "kube-api-access-4l54v". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.906051 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7fb37670-5391-491a-9141-a113fdbd8650-kube-api-access-lfhdp" (OuterVolumeSpecName: "kube-api-access-lfhdp") pod "7fb37670-5391-491a-9141-a113fdbd8650" (UID: "7fb37670-5391-491a-9141-a113fdbd8650"). InnerVolumeSpecName "kube-api-access-lfhdp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.999761 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mt7zc\" (UniqueName: \"kubernetes.io/projected/1985ab81-9425-43c9-9395-2efcdd2b336d-kube-api-access-mt7zc\") on node \"crc\" DevicePath \"\""
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.999789 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4l54v\" (UniqueName: \"kubernetes.io/projected/df5a977b-4e06-442e-90cc-ccb94cf0929b-kube-api-access-4l54v\") on node \"crc\" DevicePath \"\""
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.999798 4784 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/df5a977b-4e06-442e-90cc-ccb94cf0929b-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.999808 4784 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7fb37670-5391-491a-9141-a113fdbd8650-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.999816 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lfhdp\" (UniqueName: \"kubernetes.io/projected/7fb37670-5391-491a-9141-a113fdbd8650-kube-api-access-lfhdp\") on node \"crc\" DevicePath \"\""
Dec 05 12:45:04 crc kubenswrapper[4784]: I1205 12:45:04.999825 4784 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1985ab81-9425-43c9-9395-2efcdd2b336d-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.224767 4784 generic.go:334] "Generic (PLEG): container finished" podID="c6b3acb2-88da-4560-9648-ee6d3a5d6d60" containerID="60b3c15bb993555975059ae32e06a6d5526bdaa264e716bdd2713eab3b46a248" exitCode=0
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.224844 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-b15b-account-create-update-9zgxk" event={"ID":"c6b3acb2-88da-4560-9648-ee6d3a5d6d60","Type":"ContainerDied","Data":"60b3c15bb993555975059ae32e06a6d5526bdaa264e716bdd2713eab3b46a248"}
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.227937 4784 generic.go:334] "Generic (PLEG): container finished" podID="22340d7d-4dcb-4d3e-b7b8-9e388587b3aa" containerID="9ba8edb6af990a5c3afa3fba997d46f949d07f6c3f4b12dd5335de31d74b31ef" exitCode=0
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.228044 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-bc4gb" event={"ID":"22340d7d-4dcb-4d3e-b7b8-9e388587b3aa","Type":"ContainerDied","Data":"9ba8edb6af990a5c3afa3fba997d46f949d07f6c3f4b12dd5335de31d74b31ef"}
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.229660 4784 generic.go:334] "Generic (PLEG): container finished" podID="9ff23b53-bbeb-467c-9d74-713682fa23a4" containerID="7f8ceb121e8b5f4a850cd12af9c3e210962322abb391083b3945f3c71f2721b7" exitCode=0
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.229727 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-5940-account-create-update-9bnkr" event={"ID":"9ff23b53-bbeb-467c-9d74-713682fa23a4","Type":"ContainerDied","Data":"7f8ceb121e8b5f4a850cd12af9c3e210962322abb391083b3945f3c71f2721b7"}
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.233130 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-84546" event={"ID":"7fb37670-5391-491a-9141-a113fdbd8650","Type":"ContainerDied","Data":"94d2f163a644b8aea4242ecf2ea6233b963f034800076ab5c0c9b50cc534847d"}
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.233160 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-84546"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.233169 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="94d2f163a644b8aea4242ecf2ea6233b963f034800076ab5c0c9b50cc534847d"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.235261 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-657f-account-create-update-w28tq"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.235285 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-657f-account-create-update-w28tq" event={"ID":"1985ab81-9425-43c9-9395-2efcdd2b336d","Type":"ContainerDied","Data":"cb1733c0d06b35767e055f3a07124f1064c0584482e153c809f643ed32198cff"}
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.235307 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cb1733c0d06b35767e055f3a07124f1064c0584482e153c809f643ed32198cff"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.239303 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"80b02221-f73a-4535-b2d9-c203e5de2061","Type":"ContainerDied","Data":"2768dc94c73f2ea5bd501291986e189800ce13a06e6aa5d331233853eb3b143c"}
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.239354 4784 scope.go:117] "RemoveContainer" containerID="c72796d7f99eecf954fb94d594965c360f5801b5cb51e174051abbb7e1bc7b1d"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.239483 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.242219 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-7f4f-account-create-update-7mrkg" event={"ID":"fc12a9ba-c259-43c6-ac41-c3c0bc2ac019","Type":"ContainerDied","Data":"28cd5fc3cb459f106a4597d353f7c5dbb8339102a430853ad9683976f3521826"}
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.242255 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="28cd5fc3cb459f106a4597d353f7c5dbb8339102a430853ad9683976f3521826"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.242415 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-7f4f-account-create-update-7mrkg"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.244795 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-dnwsj"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.245079 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-dnwsj" event={"ID":"df5a977b-4e06-442e-90cc-ccb94cf0929b","Type":"ContainerDied","Data":"1be142ff1b7a0527d40e27fbabaee09ec1655577a17f731257fb27fa9e5ac4b4"}
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.245132 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1be142ff1b7a0527d40e27fbabaee09ec1655577a17f731257fb27fa9e5ac4b4"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.284547 4784 scope.go:117] "RemoveContainer" containerID="91e146b11b7a754be41abfd3596f21157f44dae68d79af3757fc671008669720"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.313948 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.316073 4784 scope.go:117] "RemoveContainer" containerID="b6ea72f4c0636fa6fcf55d4829102b8db5d000b56c5326bf0e7cb7adb514c10c"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.320168 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.339859 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 05 12:45:05 crc kubenswrapper[4784]: E1205 12:45:05.340330 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80b02221-f73a-4535-b2d9-c203e5de2061" containerName="thanos-sidecar"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.340347 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="80b02221-f73a-4535-b2d9-c203e5de2061" containerName="thanos-sidecar"
Dec 05 12:45:05 crc kubenswrapper[4784]: E1205 12:45:05.340372 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7fb37670-5391-491a-9141-a113fdbd8650" containerName="mariadb-database-create"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.340381 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="7fb37670-5391-491a-9141-a113fdbd8650" containerName="mariadb-database-create"
Dec 05 12:45:05 crc kubenswrapper[4784]: E1205 12:45:05.340396 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc12a9ba-c259-43c6-ac41-c3c0bc2ac019" containerName="mariadb-account-create-update"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.340405 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc12a9ba-c259-43c6-ac41-c3c0bc2ac019" containerName="mariadb-account-create-update"
Dec 05 12:45:05 crc kubenswrapper[4784]: E1205 12:45:05.340417 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df5a977b-4e06-442e-90cc-ccb94cf0929b" containerName="mariadb-database-create"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.340424 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="df5a977b-4e06-442e-90cc-ccb94cf0929b" containerName="mariadb-database-create"
Dec 05 12:45:05 crc kubenswrapper[4784]: E1205 12:45:05.340443 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13e0a0c7-4992-40a3-8835-7163421c900d" containerName="collect-profiles"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.340449 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="13e0a0c7-4992-40a3-8835-7163421c900d" containerName="collect-profiles"
Dec 05 12:45:05 crc kubenswrapper[4784]: E1205 12:45:05.340458 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1985ab81-9425-43c9-9395-2efcdd2b336d" containerName="mariadb-account-create-update"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.340465 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="1985ab81-9425-43c9-9395-2efcdd2b336d" containerName="mariadb-account-create-update"
Dec 05 12:45:05 crc kubenswrapper[4784]: E1205 12:45:05.340477 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80b02221-f73a-4535-b2d9-c203e5de2061" containerName="init-config-reloader"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.340485 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="80b02221-f73a-4535-b2d9-c203e5de2061" containerName="init-config-reloader"
Dec 05 12:45:05 crc kubenswrapper[4784]: E1205 12:45:05.340495 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80b02221-f73a-4535-b2d9-c203e5de2061" containerName="prometheus"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.340505 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="80b02221-f73a-4535-b2d9-c203e5de2061" containerName="prometheus"
Dec 05 12:45:05 crc kubenswrapper[4784]: E1205 12:45:05.340529 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80b02221-f73a-4535-b2d9-c203e5de2061" containerName="config-reloader"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.340537 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="80b02221-f73a-4535-b2d9-c203e5de2061" containerName="config-reloader"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.340755 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="1985ab81-9425-43c9-9395-2efcdd2b336d" containerName="mariadb-account-create-update"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.340777 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="80b02221-f73a-4535-b2d9-c203e5de2061" containerName="prometheus"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.340789 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="80b02221-f73a-4535-b2d9-c203e5de2061" containerName="config-reloader"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.340801 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc12a9ba-c259-43c6-ac41-c3c0bc2ac019" containerName="mariadb-account-create-update"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.340815 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="13e0a0c7-4992-40a3-8835-7163421c900d" containerName="collect-profiles"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.340830 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="df5a977b-4e06-442e-90cc-ccb94cf0929b" containerName="mariadb-database-create"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.340850 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="7fb37670-5391-491a-9141-a113fdbd8650" containerName="mariadb-database-create"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.340862 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="80b02221-f73a-4535-b2d9-c203e5de2061" containerName="thanos-sidecar"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.343066 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.351438 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.352349 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.352481 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.352631 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-metric-storage-prometheus-svc"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.353960 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.354160 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-mphlc"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.363690 4784 scope.go:117] "RemoveContainer" containerID="f48a9ccf406483f0a446cb52c5e2946429b81856ad61cf5b3146d8b0adda3e99"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.367156 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.377901 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.431496 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-04a1f8eb-a1ee-49a8-9ddb-41c4006574fd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-04a1f8eb-a1ee-49a8-9ddb-41c4006574fd\") pod \"prometheus-metric-storage-0\" (UID: \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.431597 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.431633 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.431702 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.432093 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dnczw\" (UniqueName: \"kubernetes.io/projected/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-kube-api-access-dnczw\") pod \"prometheus-metric-storage-0\" (UID: \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.432139 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.432159 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.432216 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.432350 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.432418 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-config\") pod \"prometheus-metric-storage-0\" (UID: \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.432487 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.535115 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-04a1f8eb-a1ee-49a8-9ddb-41c4006574fd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-04a1f8eb-a1ee-49a8-9ddb-41c4006574fd\") pod \"prometheus-metric-storage-0\" (UID: \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.535177 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.535295 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.535338 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.535940 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dnczw\" (UniqueName: \"kubernetes.io/projected/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-kube-api-access-dnczw\") pod \"prometheus-metric-storage-0\" (UID: \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.535965 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.535984 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.536006 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.536029 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.536060 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-config\") pod \"prometheus-metric-storage-0\" (UID: \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.536151 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.540822 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.541736 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.542450 4784 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.542471 4784 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-04a1f8eb-a1ee-49a8-9ddb-41c4006574fd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-04a1f8eb-a1ee-49a8-9ddb-41c4006574fd\") pod \"prometheus-metric-storage-0\" (UID: \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/b62a06a2697edf91f726f31fa2b1cc522cd33435aa7b897a0891f96c3d70ee18/globalmount\"" pod="openstack/prometheus-metric-storage-0"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.543834 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.544813 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.549083 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-config\") pod \"prometheus-metric-storage-0\" (UID: \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.549901 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.556938 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.558090 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.562417 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dnczw\" (UniqueName: \"kubernetes.io/projected/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-kube-api-access-dnczw\") pod \"prometheus-metric-storage-0\" (UID: \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.563566 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.590631 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-04a1f8eb-a1ee-49a8-9ddb-41c4006574fd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-04a1f8eb-a1ee-49a8-9ddb-41c4006574fd\") pod \"prometheus-metric-storage-0\" (UID: \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 12:45:05 crc kubenswrapper[4784]: I1205 12:45:05.671252 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Dec 05 12:45:07 crc kubenswrapper[4784]: I1205 12:45:07.015622 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="80b02221-f73a-4535-b2d9-c203e5de2061" path="/var/lib/kubelet/pods/80b02221-f73a-4535-b2d9-c203e5de2061/volumes"
Dec 05 12:45:10 crc kubenswrapper[4784]: I1205 12:45:10.214480 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0"
Dec 05 12:45:10 crc kubenswrapper[4784]: I1205 12:45:10.897850 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-bc4gb"
Dec 05 12:45:10 crc kubenswrapper[4784]: I1205 12:45:10.906446 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-5940-account-create-update-9bnkr"
Dec 05 12:45:10 crc kubenswrapper[4784]: I1205 12:45:10.916157 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-xmxxf"
Dec 05 12:45:10 crc kubenswrapper[4784]: I1205 12:45:10.934285 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-b15b-account-create-update-9zgxk"
Dec 05 12:45:11 crc kubenswrapper[4784]: I1205 12:45:11.058277 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jztvg\" (UniqueName: \"kubernetes.io/projected/9ff23b53-bbeb-467c-9d74-713682fa23a4-kube-api-access-jztvg\") pod \"9ff23b53-bbeb-467c-9d74-713682fa23a4\" (UID: \"9ff23b53-bbeb-467c-9d74-713682fa23a4\") "
Dec 05 12:45:11 crc kubenswrapper[4784]: I1205 12:45:11.058351 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c6b3acb2-88da-4560-9648-ee6d3a5d6d60-operator-scripts\") pod \"c6b3acb2-88da-4560-9648-ee6d3a5d6d60\" (UID: \"c6b3acb2-88da-4560-9648-ee6d3a5d6d60\") "
Dec 05 12:45:11 crc kubenswrapper[4784]: I1205 12:45:11.058392 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nmwq4\" (UniqueName: \"kubernetes.io/projected/39de58d0-af8e-4869-8f7c-34c94a412b1a-kube-api-access-nmwq4\") pod \"39de58d0-af8e-4869-8f7c-34c94a412b1a\" (UID: \"39de58d0-af8e-4869-8f7c-34c94a412b1a\") "
Dec 05 12:45:11 crc kubenswrapper[4784]: I1205 12:45:11.058470 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wkgmd\" (UniqueName: \"kubernetes.io/projected/c6b3acb2-88da-4560-9648-ee6d3a5d6d60-kube-api-access-wkgmd\") pod \"c6b3acb2-88da-4560-9648-ee6d3a5d6d60\" (UID: \"c6b3acb2-88da-4560-9648-ee6d3a5d6d60\") "
Dec 05 12:45:11 crc kubenswrapper[4784]: I1205 12:45:11.058486 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/39de58d0-af8e-4869-8f7c-34c94a412b1a-operator-scripts\") pod \"39de58d0-af8e-4869-8f7c-34c94a412b1a\" (UID: \"39de58d0-af8e-4869-8f7c-34c94a412b1a\") "
Dec 05 12:45:11 crc kubenswrapper[4784]: I1205 12:45:11.058553 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9ff23b53-bbeb-467c-9d74-713682fa23a4-operator-scripts\") pod \"9ff23b53-bbeb-467c-9d74-713682fa23a4\" (UID: \"9ff23b53-bbeb-467c-9d74-713682fa23a4\") "
Dec 05 12:45:11 crc kubenswrapper[4784]: I1205 12:45:11.058568 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/22340d7d-4dcb-4d3e-b7b8-9e388587b3aa-operator-scripts\") pod \"22340d7d-4dcb-4d3e-b7b8-9e388587b3aa\" (UID: \"22340d7d-4dcb-4d3e-b7b8-9e388587b3aa\") "
Dec 05 12:45:11 crc kubenswrapper[4784]: I1205 12:45:11.058599 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tzxlc\" (UniqueName: \"kubernetes.io/projected/22340d7d-4dcb-4d3e-b7b8-9e388587b3aa-kube-api-access-tzxlc\") pod \"22340d7d-4dcb-4d3e-b7b8-9e388587b3aa\" (UID: \"22340d7d-4dcb-4d3e-b7b8-9e388587b3aa\") "
Dec 05 12:45:11 crc kubenswrapper[4784]: I1205 12:45:11.059446 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9ff23b53-bbeb-467c-9d74-713682fa23a4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9ff23b53-bbeb-467c-9d74-713682fa23a4" (UID: "9ff23b53-bbeb-467c-9d74-713682fa23a4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:45:11 crc kubenswrapper[4784]: I1205 12:45:11.059870 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/39de58d0-af8e-4869-8f7c-34c94a412b1a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "39de58d0-af8e-4869-8f7c-34c94a412b1a" (UID: "39de58d0-af8e-4869-8f7c-34c94a412b1a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:45:11 crc kubenswrapper[4784]: I1205 12:45:11.059969 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6b3acb2-88da-4560-9648-ee6d3a5d6d60-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c6b3acb2-88da-4560-9648-ee6d3a5d6d60" (UID: "c6b3acb2-88da-4560-9648-ee6d3a5d6d60"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:45:11 crc kubenswrapper[4784]: I1205 12:45:11.060308 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22340d7d-4dcb-4d3e-b7b8-9e388587b3aa-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "22340d7d-4dcb-4d3e-b7b8-9e388587b3aa" (UID: "22340d7d-4dcb-4d3e-b7b8-9e388587b3aa"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:45:11 crc kubenswrapper[4784]: I1205 12:45:11.064470 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ff23b53-bbeb-467c-9d74-713682fa23a4-kube-api-access-jztvg" (OuterVolumeSpecName: "kube-api-access-jztvg") pod "9ff23b53-bbeb-467c-9d74-713682fa23a4" (UID: "9ff23b53-bbeb-467c-9d74-713682fa23a4"). InnerVolumeSpecName "kube-api-access-jztvg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:45:11 crc kubenswrapper[4784]: I1205 12:45:11.074440 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39de58d0-af8e-4869-8f7c-34c94a412b1a-kube-api-access-nmwq4" (OuterVolumeSpecName: "kube-api-access-nmwq4") pod "39de58d0-af8e-4869-8f7c-34c94a412b1a" (UID: "39de58d0-af8e-4869-8f7c-34c94a412b1a"). InnerVolumeSpecName "kube-api-access-nmwq4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:45:11 crc kubenswrapper[4784]: I1205 12:45:11.075722 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22340d7d-4dcb-4d3e-b7b8-9e388587b3aa-kube-api-access-tzxlc" (OuterVolumeSpecName: "kube-api-access-tzxlc") pod "22340d7d-4dcb-4d3e-b7b8-9e388587b3aa" (UID: "22340d7d-4dcb-4d3e-b7b8-9e388587b3aa"). InnerVolumeSpecName "kube-api-access-tzxlc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:45:11 crc kubenswrapper[4784]: I1205 12:45:11.075951 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6b3acb2-88da-4560-9648-ee6d3a5d6d60-kube-api-access-wkgmd" (OuterVolumeSpecName: "kube-api-access-wkgmd") pod "c6b3acb2-88da-4560-9648-ee6d3a5d6d60" (UID: "c6b3acb2-88da-4560-9648-ee6d3a5d6d60"). InnerVolumeSpecName "kube-api-access-wkgmd".
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:45:11 crc kubenswrapper[4784]: I1205 12:45:11.162007 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wkgmd\" (UniqueName: \"kubernetes.io/projected/c6b3acb2-88da-4560-9648-ee6d3a5d6d60-kube-api-access-wkgmd\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:11 crc kubenswrapper[4784]: I1205 12:45:11.162358 4784 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/39de58d0-af8e-4869-8f7c-34c94a412b1a-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:11 crc kubenswrapper[4784]: I1205 12:45:11.165137 4784 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9ff23b53-bbeb-467c-9d74-713682fa23a4-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:11 crc kubenswrapper[4784]: I1205 12:45:11.165343 4784 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/22340d7d-4dcb-4d3e-b7b8-9e388587b3aa-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:11 crc kubenswrapper[4784]: I1205 12:45:11.165422 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tzxlc\" (UniqueName: \"kubernetes.io/projected/22340d7d-4dcb-4d3e-b7b8-9e388587b3aa-kube-api-access-tzxlc\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:11 crc kubenswrapper[4784]: I1205 12:45:11.165490 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jztvg\" (UniqueName: \"kubernetes.io/projected/9ff23b53-bbeb-467c-9d74-713682fa23a4-kube-api-access-jztvg\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:11 crc kubenswrapper[4784]: I1205 12:45:11.165551 4784 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c6b3acb2-88da-4560-9648-ee6d3a5d6d60-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:11 crc kubenswrapper[4784]: I1205 12:45:11.165612 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nmwq4\" (UniqueName: \"kubernetes.io/projected/39de58d0-af8e-4869-8f7c-34c94a412b1a-kube-api-access-nmwq4\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:11 crc kubenswrapper[4784]: I1205 12:45:11.326475 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-xmxxf" event={"ID":"39de58d0-af8e-4869-8f7c-34c94a412b1a","Type":"ContainerDied","Data":"1012171a971829c52adb39030c6a1de632fe63558790f89047f9abd31370c52d"} Dec 05 12:45:11 crc kubenswrapper[4784]: I1205 12:45:11.326771 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1012171a971829c52adb39030c6a1de632fe63558790f89047f9abd31370c52d" Dec 05 12:45:11 crc kubenswrapper[4784]: I1205 12:45:11.326491 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-xmxxf" Dec 05 12:45:11 crc kubenswrapper[4784]: I1205 12:45:11.328593 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-b15b-account-create-update-9zgxk" event={"ID":"c6b3acb2-88da-4560-9648-ee6d3a5d6d60","Type":"ContainerDied","Data":"0d3aa3d9cdbb4d67728cb146306a49a8ed694fe190f6f5d72b5760298cc1edcd"} Dec 05 12:45:11 crc kubenswrapper[4784]: I1205 12:45:11.328625 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0d3aa3d9cdbb4d67728cb146306a49a8ed694fe190f6f5d72b5760298cc1edcd" Dec 05 12:45:11 crc kubenswrapper[4784]: I1205 12:45:11.328662 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-b15b-account-create-update-9zgxk" Dec 05 12:45:11 crc kubenswrapper[4784]: I1205 12:45:11.337256 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-bc4gb" Dec 05 12:45:11 crc kubenswrapper[4784]: I1205 12:45:11.337243 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-bc4gb" event={"ID":"22340d7d-4dcb-4d3e-b7b8-9e388587b3aa","Type":"ContainerDied","Data":"c9de01a757031e0e2c886b9957b2d11a5971494d90d7326b70ce427ec9be1db0"} Dec 05 12:45:11 crc kubenswrapper[4784]: I1205 12:45:11.337409 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c9de01a757031e0e2c886b9957b2d11a5971494d90d7326b70ce427ec9be1db0" Dec 05 12:45:11 crc kubenswrapper[4784]: I1205 12:45:11.340907 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-5940-account-create-update-9bnkr" event={"ID":"9ff23b53-bbeb-467c-9d74-713682fa23a4","Type":"ContainerDied","Data":"f430b943196dc941aa8ccdc36a6e1b794e7978c84cb36f2a41b683d22d8be20a"} Dec 05 12:45:11 crc kubenswrapper[4784]: I1205 12:45:11.340967 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f430b943196dc941aa8ccdc36a6e1b794e7978c84cb36f2a41b683d22d8be20a" Dec 05 12:45:11 crc kubenswrapper[4784]: I1205 12:45:11.341001 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-5940-account-create-update-9bnkr" Dec 05 12:45:12 crc kubenswrapper[4784]: I1205 12:45:12.764823 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-xcs6s"] Dec 05 12:45:12 crc kubenswrapper[4784]: E1205 12:45:12.765153 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ff23b53-bbeb-467c-9d74-713682fa23a4" containerName="mariadb-account-create-update" Dec 05 12:45:12 crc kubenswrapper[4784]: I1205 12:45:12.765166 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ff23b53-bbeb-467c-9d74-713682fa23a4" containerName="mariadb-account-create-update" Dec 05 12:45:12 crc kubenswrapper[4784]: E1205 12:45:12.765201 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22340d7d-4dcb-4d3e-b7b8-9e388587b3aa" containerName="mariadb-database-create" Dec 05 12:45:12 crc kubenswrapper[4784]: I1205 12:45:12.765207 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="22340d7d-4dcb-4d3e-b7b8-9e388587b3aa" containerName="mariadb-database-create" Dec 05 12:45:12 crc kubenswrapper[4784]: E1205 12:45:12.765214 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6b3acb2-88da-4560-9648-ee6d3a5d6d60" containerName="mariadb-account-create-update" Dec 05 12:45:12 crc kubenswrapper[4784]: I1205 12:45:12.765220 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6b3acb2-88da-4560-9648-ee6d3a5d6d60" containerName="mariadb-account-create-update" Dec 05 12:45:12 crc kubenswrapper[4784]: E1205 12:45:12.765237 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39de58d0-af8e-4869-8f7c-34c94a412b1a" containerName="mariadb-database-create" Dec 05 12:45:12 crc kubenswrapper[4784]: I1205 12:45:12.765243 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="39de58d0-af8e-4869-8f7c-34c94a412b1a" containerName="mariadb-database-create" Dec 05 12:45:12 crc kubenswrapper[4784]: I1205 12:45:12.765426 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ff23b53-bbeb-467c-9d74-713682fa23a4" containerName="mariadb-account-create-update" Dec 05 12:45:12 crc kubenswrapper[4784]: I1205 12:45:12.765440 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6b3acb2-88da-4560-9648-ee6d3a5d6d60" containerName="mariadb-account-create-update" Dec 05 12:45:12 crc kubenswrapper[4784]: I1205 12:45:12.765456 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="39de58d0-af8e-4869-8f7c-34c94a412b1a" containerName="mariadb-database-create" Dec 05 12:45:12 crc kubenswrapper[4784]: I1205 12:45:12.765473 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="22340d7d-4dcb-4d3e-b7b8-9e388587b3aa" containerName="mariadb-database-create" Dec 05 12:45:12 crc kubenswrapper[4784]: I1205 12:45:12.766054 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-xcs6s" Dec 05 12:45:12 crc kubenswrapper[4784]: I1205 12:45:12.769572 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-mz2l2" Dec 05 12:45:12 crc kubenswrapper[4784]: I1205 12:45:12.769747 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Dec 05 12:45:12 crc kubenswrapper[4784]: I1205 12:45:12.777747 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-xcs6s"] Dec 05 12:45:12 crc kubenswrapper[4784]: I1205 12:45:12.811424 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xwcrf\" (UniqueName: \"kubernetes.io/projected/a94ed534-bf7b-4fdc-9c79-0fa4425cb785-kube-api-access-xwcrf\") pod \"glance-db-sync-xcs6s\" (UID: \"a94ed534-bf7b-4fdc-9c79-0fa4425cb785\") " pod="openstack/glance-db-sync-xcs6s" Dec 05 12:45:12 crc kubenswrapper[4784]: I1205 12:45:12.811497 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a94ed534-bf7b-4fdc-9c79-0fa4425cb785-config-data\") pod \"glance-db-sync-xcs6s\" (UID: \"a94ed534-bf7b-4fdc-9c79-0fa4425cb785\") " pod="openstack/glance-db-sync-xcs6s" Dec 05 12:45:12 crc kubenswrapper[4784]: I1205 12:45:12.811526 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a94ed534-bf7b-4fdc-9c79-0fa4425cb785-combined-ca-bundle\") pod \"glance-db-sync-xcs6s\" (UID: \"a94ed534-bf7b-4fdc-9c79-0fa4425cb785\") " pod="openstack/glance-db-sync-xcs6s" Dec 05 12:45:12 crc kubenswrapper[4784]: I1205 12:45:12.811545 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a94ed534-bf7b-4fdc-9c79-0fa4425cb785-db-sync-config-data\") pod \"glance-db-sync-xcs6s\" (UID: \"a94ed534-bf7b-4fdc-9c79-0fa4425cb785\") " pod="openstack/glance-db-sync-xcs6s" Dec 05 12:45:12 crc kubenswrapper[4784]: I1205 12:45:12.912947 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a94ed534-bf7b-4fdc-9c79-0fa4425cb785-combined-ca-bundle\") pod \"glance-db-sync-xcs6s\" (UID: \"a94ed534-bf7b-4fdc-9c79-0fa4425cb785\") " pod="openstack/glance-db-sync-xcs6s" Dec 05 12:45:12 crc kubenswrapper[4784]: I1205 12:45:12.912983 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a94ed534-bf7b-4fdc-9c79-0fa4425cb785-db-sync-config-data\") pod \"glance-db-sync-xcs6s\" (UID: \"a94ed534-bf7b-4fdc-9c79-0fa4425cb785\") " pod="openstack/glance-db-sync-xcs6s" Dec 05 12:45:12 crc kubenswrapper[4784]: I1205 12:45:12.913098 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xwcrf\" (UniqueName: \"kubernetes.io/projected/a94ed534-bf7b-4fdc-9c79-0fa4425cb785-kube-api-access-xwcrf\") pod \"glance-db-sync-xcs6s\" (UID: \"a94ed534-bf7b-4fdc-9c79-0fa4425cb785\") " pod="openstack/glance-db-sync-xcs6s" Dec 05 12:45:12 crc kubenswrapper[4784]: I1205 12:45:12.913130 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a94ed534-bf7b-4fdc-9c79-0fa4425cb785-config-data\") pod 
\"glance-db-sync-xcs6s\" (UID: \"a94ed534-bf7b-4fdc-9c79-0fa4425cb785\") " pod="openstack/glance-db-sync-xcs6s" Dec 05 12:45:12 crc kubenswrapper[4784]: I1205 12:45:12.919342 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a94ed534-bf7b-4fdc-9c79-0fa4425cb785-db-sync-config-data\") pod \"glance-db-sync-xcs6s\" (UID: \"a94ed534-bf7b-4fdc-9c79-0fa4425cb785\") " pod="openstack/glance-db-sync-xcs6s" Dec 05 12:45:12 crc kubenswrapper[4784]: I1205 12:45:12.921463 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a94ed534-bf7b-4fdc-9c79-0fa4425cb785-config-data\") pod \"glance-db-sync-xcs6s\" (UID: \"a94ed534-bf7b-4fdc-9c79-0fa4425cb785\") " pod="openstack/glance-db-sync-xcs6s" Dec 05 12:45:12 crc kubenswrapper[4784]: I1205 12:45:12.921798 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a94ed534-bf7b-4fdc-9c79-0fa4425cb785-combined-ca-bundle\") pod \"glance-db-sync-xcs6s\" (UID: \"a94ed534-bf7b-4fdc-9c79-0fa4425cb785\") " pod="openstack/glance-db-sync-xcs6s" Dec 05 12:45:12 crc kubenswrapper[4784]: I1205 12:45:12.929837 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xwcrf\" (UniqueName: \"kubernetes.io/projected/a94ed534-bf7b-4fdc-9c79-0fa4425cb785-kube-api-access-xwcrf\") pod \"glance-db-sync-xcs6s\" (UID: \"a94ed534-bf7b-4fdc-9c79-0fa4425cb785\") " pod="openstack/glance-db-sync-xcs6s" Dec 05 12:45:13 crc kubenswrapper[4784]: I1205 12:45:13.084548 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-xcs6s" Dec 05 12:45:15 crc kubenswrapper[4784]: W1205 12:45:15.702620 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1bc3d9db_9849_427a_a9a7_ada6e23ca9e3.slice/crio-3684af970a2c3f57554a85ceb125b9cc1659940b40db1a61afb28c53e8dd6091 WatchSource:0}: Error finding container 3684af970a2c3f57554a85ceb125b9cc1659940b40db1a61afb28c53e8dd6091: Status 404 returned error can't find the container with id 3684af970a2c3f57554a85ceb125b9cc1659940b40db1a61afb28c53e8dd6091 Dec 05 12:45:15 crc kubenswrapper[4784]: I1205 12:45:15.708496 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 05 12:45:15 crc kubenswrapper[4784]: I1205 12:45:15.896488 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-xcs6s"] Dec 05 12:45:15 crc kubenswrapper[4784]: W1205 12:45:15.902464 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda94ed534_bf7b_4fdc_9c79_0fa4425cb785.slice/crio-f374ed3f56ff8b358eea9b4fee8a12c667922581c6194a3822342ea51b02bc0e WatchSource:0}: Error finding container f374ed3f56ff8b358eea9b4fee8a12c667922581c6194a3822342ea51b02bc0e: Status 404 returned error can't find the container with id f374ed3f56ff8b358eea9b4fee8a12c667922581c6194a3822342ea51b02bc0e Dec 05 12:45:16 crc kubenswrapper[4784]: I1205 12:45:16.391214 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3","Type":"ContainerStarted","Data":"3684af970a2c3f57554a85ceb125b9cc1659940b40db1a61afb28c53e8dd6091"} Dec 05 12:45:16 crc kubenswrapper[4784]: I1205 12:45:16.392894 4784 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-gkqx2" event={"ID":"ffc17758-4586-495d-842c-68c47898b1c1","Type":"ContainerStarted","Data":"55dc3dd3218c3888d7da7e7118ddd8a32bf8c9cf47f8e3166dd34b285f30eb28"} Dec 05 12:45:16 crc kubenswrapper[4784]: I1205 12:45:16.395040 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-sync-8rhvb" event={"ID":"44208e80-4774-4f47-93a8-6e23b7402949","Type":"ContainerStarted","Data":"6e2a893e3e5a3dbadc736398e703e136874ad4643634f2fe8c722f57069c60a7"} Dec 05 12:45:16 crc kubenswrapper[4784]: I1205 12:45:16.398107 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-xcs6s" event={"ID":"a94ed534-bf7b-4fdc-9c79-0fa4425cb785","Type":"ContainerStarted","Data":"f374ed3f56ff8b358eea9b4fee8a12c667922581c6194a3822342ea51b02bc0e"} Dec 05 12:45:16 crc kubenswrapper[4784]: I1205 12:45:16.414784 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-gkqx2" podStartSLOduration=2.956686067 podStartE2EDuration="16.41476565s" podCreationTimestamp="2025-12-05 12:45:00 +0000 UTC" firstStartedPulling="2025-12-05 12:45:01.857727331 +0000 UTC m=+1181.277794146" lastFinishedPulling="2025-12-05 12:45:15.315806914 +0000 UTC m=+1194.735873729" observedRunningTime="2025-12-05 12:45:16.413485991 +0000 UTC m=+1195.833552806" watchObservedRunningTime="2025-12-05 12:45:16.41476565 +0000 UTC m=+1195.834832485" Dec 05 12:45:16 crc kubenswrapper[4784]: I1205 12:45:16.432500 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-db-sync-8rhvb" podStartSLOduration=2.202578217 podStartE2EDuration="14.432481634s" podCreationTimestamp="2025-12-05 12:45:02 +0000 UTC" firstStartedPulling="2025-12-05 12:45:03.14713215 +0000 UTC m=+1182.567198965" lastFinishedPulling="2025-12-05 12:45:15.377035557 +0000 UTC m=+1194.797102382" observedRunningTime="2025-12-05 12:45:16.426114875 +0000 UTC m=+1195.846181690" watchObservedRunningTime="2025-12-05 12:45:16.432481634 +0000 UTC m=+1195.852548449" Dec 05 12:45:18 crc kubenswrapper[4784]: I1205 12:45:18.420634 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3","Type":"ContainerStarted","Data":"520a6f5e6de6d5e04ddbd125f2f6f01e231622019e47cfaf93f2b494f6757e62"} Dec 05 12:45:20 crc kubenswrapper[4784]: I1205 12:45:20.437176 4784 generic.go:334] "Generic (PLEG): container finished" podID="44208e80-4774-4f47-93a8-6e23b7402949" containerID="6e2a893e3e5a3dbadc736398e703e136874ad4643634f2fe8c722f57069c60a7" exitCode=0 Dec 05 12:45:20 crc kubenswrapper[4784]: I1205 12:45:20.437250 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-sync-8rhvb" event={"ID":"44208e80-4774-4f47-93a8-6e23b7402949","Type":"ContainerDied","Data":"6e2a893e3e5a3dbadc736398e703e136874ad4643634f2fe8c722f57069c60a7"} Dec 05 12:45:21 crc kubenswrapper[4784]: I1205 12:45:21.174312 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f34e93a8-02d9-44ef-a18e-13ce24c3f9a6-etc-swift\") pod \"swift-storage-0\" (UID: \"f34e93a8-02d9-44ef-a18e-13ce24c3f9a6\") " pod="openstack/swift-storage-0" Dec 05 12:45:21 crc kubenswrapper[4784]: I1205 12:45:21.191370 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: 
\"kubernetes.io/projected/f34e93a8-02d9-44ef-a18e-13ce24c3f9a6-etc-swift\") pod \"swift-storage-0\" (UID: \"f34e93a8-02d9-44ef-a18e-13ce24c3f9a6\") " pod="openstack/swift-storage-0" Dec 05 12:45:21 crc kubenswrapper[4784]: I1205 12:45:21.416533 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Dec 05 12:45:21 crc kubenswrapper[4784]: I1205 12:45:21.449013 4784 generic.go:334] "Generic (PLEG): container finished" podID="ffc17758-4586-495d-842c-68c47898b1c1" containerID="55dc3dd3218c3888d7da7e7118ddd8a32bf8c9cf47f8e3166dd34b285f30eb28" exitCode=0 Dec 05 12:45:21 crc kubenswrapper[4784]: I1205 12:45:21.449254 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-gkqx2" event={"ID":"ffc17758-4586-495d-842c-68c47898b1c1","Type":"ContainerDied","Data":"55dc3dd3218c3888d7da7e7118ddd8a32bf8c9cf47f8e3166dd34b285f30eb28"} Dec 05 12:45:24 crc kubenswrapper[4784]: I1205 12:45:24.479673 4784 generic.go:334] "Generic (PLEG): container finished" podID="1bc3d9db-9849-427a-a9a7-ada6e23ca9e3" containerID="520a6f5e6de6d5e04ddbd125f2f6f01e231622019e47cfaf93f2b494f6757e62" exitCode=0 Dec 05 12:45:24 crc kubenswrapper[4784]: I1205 12:45:24.479895 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3","Type":"ContainerDied","Data":"520a6f5e6de6d5e04ddbd125f2f6f01e231622019e47cfaf93f2b494f6757e62"} Dec 05 12:45:27 crc kubenswrapper[4784]: I1205 12:45:27.514242 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-sync-8rhvb" event={"ID":"44208e80-4774-4f47-93a8-6e23b7402949","Type":"ContainerDied","Data":"b94ba3bd2bb106a3bb79fd836c8a92c961990c3a51a7749e969225a6f09f73dd"} Dec 05 12:45:27 crc kubenswrapper[4784]: I1205 12:45:27.514855 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b94ba3bd2bb106a3bb79fd836c8a92c961990c3a51a7749e969225a6f09f73dd" Dec 05 12:45:27 crc kubenswrapper[4784]: I1205 12:45:27.516726 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-gkqx2" event={"ID":"ffc17758-4586-495d-842c-68c47898b1c1","Type":"ContainerDied","Data":"0ad33ac8a5994d1772cf49197fc36582724959c46fb86d61c67608ba878c1f63"} Dec 05 12:45:27 crc kubenswrapper[4784]: I1205 12:45:27.516754 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0ad33ac8a5994d1772cf49197fc36582724959c46fb86d61c67608ba878c1f63" Dec 05 12:45:27 crc kubenswrapper[4784]: I1205 12:45:27.782217 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-sync-8rhvb" Dec 05 12:45:27 crc kubenswrapper[4784]: I1205 12:45:27.794653 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-gkqx2" Dec 05 12:45:27 crc kubenswrapper[4784]: I1205 12:45:27.897671 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44208e80-4774-4f47-93a8-6e23b7402949-combined-ca-bundle\") pod \"44208e80-4774-4f47-93a8-6e23b7402949\" (UID: \"44208e80-4774-4f47-93a8-6e23b7402949\") " Dec 05 12:45:27 crc kubenswrapper[4784]: I1205 12:45:27.897799 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffc17758-4586-495d-842c-68c47898b1c1-combined-ca-bundle\") pod \"ffc17758-4586-495d-842c-68c47898b1c1\" (UID: \"ffc17758-4586-495d-842c-68c47898b1c1\") " Dec 05 12:45:27 crc kubenswrapper[4784]: I1205 12:45:27.897843 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/44208e80-4774-4f47-93a8-6e23b7402949-db-sync-config-data\") pod \"44208e80-4774-4f47-93a8-6e23b7402949\" (UID: \"44208e80-4774-4f47-93a8-6e23b7402949\") " Dec 05 12:45:27 crc kubenswrapper[4784]: I1205 12:45:27.897931 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44208e80-4774-4f47-93a8-6e23b7402949-config-data\") pod \"44208e80-4774-4f47-93a8-6e23b7402949\" (UID: \"44208e80-4774-4f47-93a8-6e23b7402949\") " Dec 05 12:45:27 crc kubenswrapper[4784]: I1205 12:45:27.898715 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6k6l9\" (UniqueName: \"kubernetes.io/projected/44208e80-4774-4f47-93a8-6e23b7402949-kube-api-access-6k6l9\") pod \"44208e80-4774-4f47-93a8-6e23b7402949\" (UID: \"44208e80-4774-4f47-93a8-6e23b7402949\") " Dec 05 12:45:27 crc kubenswrapper[4784]: I1205 12:45:27.898756 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hm22m\" (UniqueName: \"kubernetes.io/projected/ffc17758-4586-495d-842c-68c47898b1c1-kube-api-access-hm22m\") pod \"ffc17758-4586-495d-842c-68c47898b1c1\" (UID: \"ffc17758-4586-495d-842c-68c47898b1c1\") " Dec 05 12:45:27 crc kubenswrapper[4784]: I1205 12:45:27.898862 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffc17758-4586-495d-842c-68c47898b1c1-config-data\") pod \"ffc17758-4586-495d-842c-68c47898b1c1\" (UID: \"ffc17758-4586-495d-842c-68c47898b1c1\") " Dec 05 12:45:27 crc kubenswrapper[4784]: I1205 12:45:27.905615 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ffc17758-4586-495d-842c-68c47898b1c1-kube-api-access-hm22m" (OuterVolumeSpecName: "kube-api-access-hm22m") pod "ffc17758-4586-495d-842c-68c47898b1c1" (UID: "ffc17758-4586-495d-842c-68c47898b1c1"). InnerVolumeSpecName "kube-api-access-hm22m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:45:27 crc kubenswrapper[4784]: I1205 12:45:27.908594 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44208e80-4774-4f47-93a8-6e23b7402949-kube-api-access-6k6l9" (OuterVolumeSpecName: "kube-api-access-6k6l9") pod "44208e80-4774-4f47-93a8-6e23b7402949" (UID: "44208e80-4774-4f47-93a8-6e23b7402949"). InnerVolumeSpecName "kube-api-access-6k6l9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:45:27 crc kubenswrapper[4784]: I1205 12:45:27.920951 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44208e80-4774-4f47-93a8-6e23b7402949-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "44208e80-4774-4f47-93a8-6e23b7402949" (UID: "44208e80-4774-4f47-93a8-6e23b7402949"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:45:27 crc kubenswrapper[4784]: I1205 12:45:27.933650 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffc17758-4586-495d-842c-68c47898b1c1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ffc17758-4586-495d-842c-68c47898b1c1" (UID: "ffc17758-4586-495d-842c-68c47898b1c1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:45:27 crc kubenswrapper[4784]: I1205 12:45:27.934101 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44208e80-4774-4f47-93a8-6e23b7402949-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "44208e80-4774-4f47-93a8-6e23b7402949" (UID: "44208e80-4774-4f47-93a8-6e23b7402949"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:45:27 crc kubenswrapper[4784]: I1205 12:45:27.962972 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44208e80-4774-4f47-93a8-6e23b7402949-config-data" (OuterVolumeSpecName: "config-data") pod "44208e80-4774-4f47-93a8-6e23b7402949" (UID: "44208e80-4774-4f47-93a8-6e23b7402949"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:45:27 crc kubenswrapper[4784]: I1205 12:45:27.978604 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffc17758-4586-495d-842c-68c47898b1c1-config-data" (OuterVolumeSpecName: "config-data") pod "ffc17758-4586-495d-842c-68c47898b1c1" (UID: "ffc17758-4586-495d-842c-68c47898b1c1"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:45:28 crc kubenswrapper[4784]: I1205 12:45:28.001939 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffc17758-4586-495d-842c-68c47898b1c1-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:28 crc kubenswrapper[4784]: I1205 12:45:28.001982 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44208e80-4774-4f47-93a8-6e23b7402949-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:28 crc kubenswrapper[4784]: I1205 12:45:28.001993 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffc17758-4586-495d-842c-68c47898b1c1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:28 crc kubenswrapper[4784]: I1205 12:45:28.002003 4784 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/44208e80-4774-4f47-93a8-6e23b7402949-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:28 crc kubenswrapper[4784]: I1205 12:45:28.002012 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44208e80-4774-4f47-93a8-6e23b7402949-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:28 crc kubenswrapper[4784]: I1205 12:45:28.002022 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6k6l9\" (UniqueName: \"kubernetes.io/projected/44208e80-4774-4f47-93a8-6e23b7402949-kube-api-access-6k6l9\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:28 crc kubenswrapper[4784]: I1205 12:45:28.002031 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hm22m\" (UniqueName: \"kubernetes.io/projected/ffc17758-4586-495d-842c-68c47898b1c1-kube-api-access-hm22m\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:28 crc kubenswrapper[4784]: I1205 12:45:28.227079 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 05 12:45:28 crc kubenswrapper[4784]: W1205 12:45:28.234668 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf34e93a8_02d9_44ef_a18e_13ce24c3f9a6.slice/crio-73364555693d1ba0e61106c235fde98f7ea8b59b72cb1f3d2c28dd6a5674f74f WatchSource:0}: Error finding container 73364555693d1ba0e61106c235fde98f7ea8b59b72cb1f3d2c28dd6a5674f74f: Status 404 returned error can't find the container with id 73364555693d1ba0e61106c235fde98f7ea8b59b72cb1f3d2c28dd6a5674f74f Dec 05 12:45:28 crc kubenswrapper[4784]: I1205 12:45:28.541819 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f34e93a8-02d9-44ef-a18e-13ce24c3f9a6","Type":"ContainerStarted","Data":"73364555693d1ba0e61106c235fde98f7ea8b59b72cb1f3d2c28dd6a5674f74f"} Dec 05 12:45:28 crc kubenswrapper[4784]: I1205 12:45:28.544938 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3","Type":"ContainerStarted","Data":"84c4c708e0559a93daba69693026e01f320821e16ca9f28d450be03dd5ab621c"} Dec 05 12:45:28 crc kubenswrapper[4784]: I1205 12:45:28.546848 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-xcs6s" 
event={"ID":"a94ed534-bf7b-4fdc-9c79-0fa4425cb785","Type":"ContainerStarted","Data":"ad747a9b988792246f6406b792de3f2be8a3af509d4378bec43549e9fc5f91ca"} Dec 05 12:45:28 crc kubenswrapper[4784]: I1205 12:45:28.546920 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-gkqx2" Dec 05 12:45:28 crc kubenswrapper[4784]: I1205 12:45:28.546986 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-sync-8rhvb" Dec 05 12:45:28 crc kubenswrapper[4784]: I1205 12:45:28.570977 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-xcs6s" podStartSLOduration=4.81151218 podStartE2EDuration="16.570958533s" podCreationTimestamp="2025-12-05 12:45:12 +0000 UTC" firstStartedPulling="2025-12-05 12:45:15.905110001 +0000 UTC m=+1195.325176816" lastFinishedPulling="2025-12-05 12:45:27.664556334 +0000 UTC m=+1207.084623169" observedRunningTime="2025-12-05 12:45:28.563450979 +0000 UTC m=+1207.983517794" watchObservedRunningTime="2025-12-05 12:45:28.570958533 +0000 UTC m=+1207.991025348" Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.115859 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-pblr2"] Dec 05 12:45:29 crc kubenswrapper[4784]: E1205 12:45:29.116447 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffc17758-4586-495d-842c-68c47898b1c1" containerName="keystone-db-sync" Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.116464 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffc17758-4586-495d-842c-68c47898b1c1" containerName="keystone-db-sync" Dec 05 12:45:29 crc kubenswrapper[4784]: E1205 12:45:29.116495 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44208e80-4774-4f47-93a8-6e23b7402949" containerName="watcher-db-sync" Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.116501 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="44208e80-4774-4f47-93a8-6e23b7402949" containerName="watcher-db-sync" Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.116647 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="ffc17758-4586-495d-842c-68c47898b1c1" containerName="keystone-db-sync" Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.116665 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="44208e80-4774-4f47-93a8-6e23b7402949" containerName="watcher-db-sync" Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.117238 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-pblr2" Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.121511 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.121733 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.121876 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.121989 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.123817 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-lkxwc" Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.142053 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-pblr2"] Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.155237 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/129c3642-c456-4b24-bef9-9bade50088d7-credential-keys\") pod \"keystone-bootstrap-pblr2\" (UID: \"129c3642-c456-4b24-bef9-9bade50088d7\") " pod="openstack/keystone-bootstrap-pblr2" Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.155289 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/129c3642-c456-4b24-bef9-9bade50088d7-fernet-keys\") pod \"keystone-bootstrap-pblr2\" (UID: \"129c3642-c456-4b24-bef9-9bade50088d7\") " pod="openstack/keystone-bootstrap-pblr2" Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.155318 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/129c3642-c456-4b24-bef9-9bade50088d7-combined-ca-bundle\") pod \"keystone-bootstrap-pblr2\" (UID: \"129c3642-c456-4b24-bef9-9bade50088d7\") " pod="openstack/keystone-bootstrap-pblr2" Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.155353 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/129c3642-c456-4b24-bef9-9bade50088d7-config-data\") pod \"keystone-bootstrap-pblr2\" (UID: \"129c3642-c456-4b24-bef9-9bade50088d7\") " pod="openstack/keystone-bootstrap-pblr2" Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.155377 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/129c3642-c456-4b24-bef9-9bade50088d7-scripts\") pod \"keystone-bootstrap-pblr2\" (UID: \"129c3642-c456-4b24-bef9-9bade50088d7\") " pod="openstack/keystone-bootstrap-pblr2" Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.155400 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lkqqz\" (UniqueName: \"kubernetes.io/projected/129c3642-c456-4b24-bef9-9bade50088d7-kube-api-access-lkqqz\") pod \"keystone-bootstrap-pblr2\" (UID: \"129c3642-c456-4b24-bef9-9bade50088d7\") " pod="openstack/keystone-bootstrap-pblr2" Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.173896 4784 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/dnsmasq-dns-5f486db75f-kwgvh"] Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.175479 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f486db75f-kwgvh" Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.224362 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f486db75f-kwgvh"] Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.268036 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a0516148-87e4-4900-8ad9-e4d81d6599e4-ovsdbserver-sb\") pod \"dnsmasq-dns-5f486db75f-kwgvh\" (UID: \"a0516148-87e4-4900-8ad9-e4d81d6599e4\") " pod="openstack/dnsmasq-dns-5f486db75f-kwgvh" Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.268093 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a0516148-87e4-4900-8ad9-e4d81d6599e4-ovsdbserver-nb\") pod \"dnsmasq-dns-5f486db75f-kwgvh\" (UID: \"a0516148-87e4-4900-8ad9-e4d81d6599e4\") " pod="openstack/dnsmasq-dns-5f486db75f-kwgvh" Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.268148 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a0516148-87e4-4900-8ad9-e4d81d6599e4-dns-svc\") pod \"dnsmasq-dns-5f486db75f-kwgvh\" (UID: \"a0516148-87e4-4900-8ad9-e4d81d6599e4\") " pod="openstack/dnsmasq-dns-5f486db75f-kwgvh" Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.268174 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/129c3642-c456-4b24-bef9-9bade50088d7-credential-keys\") pod \"keystone-bootstrap-pblr2\" (UID: \"129c3642-c456-4b24-bef9-9bade50088d7\") " pod="openstack/keystone-bootstrap-pblr2" Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.268211 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/129c3642-c456-4b24-bef9-9bade50088d7-fernet-keys\") pod \"keystone-bootstrap-pblr2\" (UID: \"129c3642-c456-4b24-bef9-9bade50088d7\") " pod="openstack/keystone-bootstrap-pblr2" Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.268234 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/129c3642-c456-4b24-bef9-9bade50088d7-combined-ca-bundle\") pod \"keystone-bootstrap-pblr2\" (UID: \"129c3642-c456-4b24-bef9-9bade50088d7\") " pod="openstack/keystone-bootstrap-pblr2" Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.268255 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4m7kb\" (UniqueName: \"kubernetes.io/projected/a0516148-87e4-4900-8ad9-e4d81d6599e4-kube-api-access-4m7kb\") pod \"dnsmasq-dns-5f486db75f-kwgvh\" (UID: \"a0516148-87e4-4900-8ad9-e4d81d6599e4\") " pod="openstack/dnsmasq-dns-5f486db75f-kwgvh" Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.268286 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/129c3642-c456-4b24-bef9-9bade50088d7-config-data\") pod \"keystone-bootstrap-pblr2\" (UID: \"129c3642-c456-4b24-bef9-9bade50088d7\") " pod="openstack/keystone-bootstrap-pblr2" Dec 05 12:45:29 
crc kubenswrapper[4784]: I1205 12:45:29.268310 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/129c3642-c456-4b24-bef9-9bade50088d7-scripts\") pod \"keystone-bootstrap-pblr2\" (UID: \"129c3642-c456-4b24-bef9-9bade50088d7\") " pod="openstack/keystone-bootstrap-pblr2" Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.268331 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lkqqz\" (UniqueName: \"kubernetes.io/projected/129c3642-c456-4b24-bef9-9bade50088d7-kube-api-access-lkqqz\") pod \"keystone-bootstrap-pblr2\" (UID: \"129c3642-c456-4b24-bef9-9bade50088d7\") " pod="openstack/keystone-bootstrap-pblr2" Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.268347 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a0516148-87e4-4900-8ad9-e4d81d6599e4-config\") pod \"dnsmasq-dns-5f486db75f-kwgvh\" (UID: \"a0516148-87e4-4900-8ad9-e4d81d6599e4\") " pod="openstack/dnsmasq-dns-5f486db75f-kwgvh" Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.342939 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lkqqz\" (UniqueName: \"kubernetes.io/projected/129c3642-c456-4b24-bef9-9bade50088d7-kube-api-access-lkqqz\") pod \"keystone-bootstrap-pblr2\" (UID: \"129c3642-c456-4b24-bef9-9bade50088d7\") " pod="openstack/keystone-bootstrap-pblr2" Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.353897 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-applier-0"] Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.361886 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-applier-0" Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.365981 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-applier-config-data" Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.366205 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-watcher-dockercfg-gdqkr" Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.371157 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a0516148-87e4-4900-8ad9-e4d81d6599e4-ovsdbserver-nb\") pod \"dnsmasq-dns-5f486db75f-kwgvh\" (UID: \"a0516148-87e4-4900-8ad9-e4d81d6599e4\") " pod="openstack/dnsmasq-dns-5f486db75f-kwgvh" Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.371267 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a0516148-87e4-4900-8ad9-e4d81d6599e4-dns-svc\") pod \"dnsmasq-dns-5f486db75f-kwgvh\" (UID: \"a0516148-87e4-4900-8ad9-e4d81d6599e4\") " pod="openstack/dnsmasq-dns-5f486db75f-kwgvh" Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.371338 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4m7kb\" (UniqueName: \"kubernetes.io/projected/a0516148-87e4-4900-8ad9-e4d81d6599e4-kube-api-access-4m7kb\") pod \"dnsmasq-dns-5f486db75f-kwgvh\" (UID: \"a0516148-87e4-4900-8ad9-e4d81d6599e4\") " pod="openstack/dnsmasq-dns-5f486db75f-kwgvh" Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.371404 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a0516148-87e4-4900-8ad9-e4d81d6599e4-config\") pod \"dnsmasq-dns-5f486db75f-kwgvh\" (UID: \"a0516148-87e4-4900-8ad9-e4d81d6599e4\") " pod="openstack/dnsmasq-dns-5f486db75f-kwgvh" Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.371452 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a0516148-87e4-4900-8ad9-e4d81d6599e4-ovsdbserver-sb\") pod \"dnsmasq-dns-5f486db75f-kwgvh\" (UID: \"a0516148-87e4-4900-8ad9-e4d81d6599e4\") " pod="openstack/dnsmasq-dns-5f486db75f-kwgvh" Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.372548 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a0516148-87e4-4900-8ad9-e4d81d6599e4-ovsdbserver-sb\") pod \"dnsmasq-dns-5f486db75f-kwgvh\" (UID: \"a0516148-87e4-4900-8ad9-e4d81d6599e4\") " pod="openstack/dnsmasq-dns-5f486db75f-kwgvh" Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.373237 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a0516148-87e4-4900-8ad9-e4d81d6599e4-ovsdbserver-nb\") pod \"dnsmasq-dns-5f486db75f-kwgvh\" (UID: \"a0516148-87e4-4900-8ad9-e4d81d6599e4\") " pod="openstack/dnsmasq-dns-5f486db75f-kwgvh" Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.373888 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a0516148-87e4-4900-8ad9-e4d81d6599e4-dns-svc\") pod \"dnsmasq-dns-5f486db75f-kwgvh\" (UID: \"a0516148-87e4-4900-8ad9-e4d81d6599e4\") " pod="openstack/dnsmasq-dns-5f486db75f-kwgvh" Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.395941 4784 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a0516148-87e4-4900-8ad9-e4d81d6599e4-config\") pod \"dnsmasq-dns-5f486db75f-kwgvh\" (UID: \"a0516148-87e4-4900-8ad9-e4d81d6599e4\") " pod="openstack/dnsmasq-dns-5f486db75f-kwgvh"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.403369 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-applier-0"]
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.415906 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/129c3642-c456-4b24-bef9-9bade50088d7-credential-keys\") pod \"keystone-bootstrap-pblr2\" (UID: \"129c3642-c456-4b24-bef9-9bade50088d7\") " pod="openstack/keystone-bootstrap-pblr2"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.420841 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4m7kb\" (UniqueName: \"kubernetes.io/projected/a0516148-87e4-4900-8ad9-e4d81d6599e4-kube-api-access-4m7kb\") pod \"dnsmasq-dns-5f486db75f-kwgvh\" (UID: \"a0516148-87e4-4900-8ad9-e4d81d6599e4\") " pod="openstack/dnsmasq-dns-5f486db75f-kwgvh"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.421383 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/129c3642-c456-4b24-bef9-9bade50088d7-fernet-keys\") pod \"keystone-bootstrap-pblr2\" (UID: \"129c3642-c456-4b24-bef9-9bade50088d7\") " pod="openstack/keystone-bootstrap-pblr2"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.424084 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/129c3642-c456-4b24-bef9-9bade50088d7-combined-ca-bundle\") pod \"keystone-bootstrap-pblr2\" (UID: \"129c3642-c456-4b24-bef9-9bade50088d7\") " pod="openstack/keystone-bootstrap-pblr2"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.424640 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/129c3642-c456-4b24-bef9-9bade50088d7-scripts\") pod \"keystone-bootstrap-pblr2\" (UID: \"129c3642-c456-4b24-bef9-9bade50088d7\") " pod="openstack/keystone-bootstrap-pblr2"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.425029 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/129c3642-c456-4b24-bef9-9bade50088d7-config-data\") pod \"keystone-bootstrap-pblr2\" (UID: \"129c3642-c456-4b24-bef9-9bade50088d7\") " pod="openstack/keystone-bootstrap-pblr2"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.430468 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-api-0"]
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.431859 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-api-0"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.457604 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-api-config-data"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.466269 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-24k4j"]
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.467972 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-24k4j"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.486671 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-pblr2"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.487254 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.487354 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.487436 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-kcnls"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.488336 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23695adb-c981-437e-b37d-cef86d57d515-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"23695adb-c981-437e-b37d-cef86d57d515\") " pod="openstack/watcher-api-0"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.488374 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/23695adb-c981-437e-b37d-cef86d57d515-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"23695adb-c981-437e-b37d-cef86d57d515\") " pod="openstack/watcher-api-0"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.488408 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nscz6\" (UniqueName: \"kubernetes.io/projected/23695adb-c981-437e-b37d-cef86d57d515-kube-api-access-nscz6\") pod \"watcher-api-0\" (UID: \"23695adb-c981-437e-b37d-cef86d57d515\") " pod="openstack/watcher-api-0"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.488428 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23695adb-c981-437e-b37d-cef86d57d515-config-data\") pod \"watcher-api-0\" (UID: \"23695adb-c981-437e-b37d-cef86d57d515\") " pod="openstack/watcher-api-0"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.488451 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ppxmh\" (UniqueName: \"kubernetes.io/projected/1b372793-eadc-4593-af62-97e7d647c76d-kube-api-access-ppxmh\") pod \"watcher-applier-0\" (UID: \"1b372793-eadc-4593-af62-97e7d647c76d\") " pod="openstack/watcher-applier-0"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.488468 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/23695adb-c981-437e-b37d-cef86d57d515-logs\") pod \"watcher-api-0\" (UID: \"23695adb-c981-437e-b37d-cef86d57d515\") " pod="openstack/watcher-api-0"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.488486 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b372793-eadc-4593-af62-97e7d647c76d-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"1b372793-eadc-4593-af62-97e7d647c76d\") " pod="openstack/watcher-applier-0"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.488512 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b372793-eadc-4593-af62-97e7d647c76d-config-data\") pod \"watcher-applier-0\" (UID: \"1b372793-eadc-4593-af62-97e7d647c76d\") " pod="openstack/watcher-applier-0"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.488578 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1b372793-eadc-4593-af62-97e7d647c76d-logs\") pod \"watcher-applier-0\" (UID: \"1b372793-eadc-4593-af62-97e7d647c76d\") " pod="openstack/watcher-applier-0"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.510606 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f486db75f-kwgvh"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.548661 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-24k4j"]
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.566683 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-api-0"]
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.591281 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23695adb-c981-437e-b37d-cef86d57d515-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"23695adb-c981-437e-b37d-cef86d57d515\") " pod="openstack/watcher-api-0"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.591320 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e187592b-b331-4144-9a27-ba81e79121b6-etc-machine-id\") pod \"cinder-db-sync-24k4j\" (UID: \"e187592b-b331-4144-9a27-ba81e79121b6\") " pod="openstack/cinder-db-sync-24k4j"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.591345 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/23695adb-c981-437e-b37d-cef86d57d515-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"23695adb-c981-437e-b37d-cef86d57d515\") " pod="openstack/watcher-api-0"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.591377 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nscz6\" (UniqueName: \"kubernetes.io/projected/23695adb-c981-437e-b37d-cef86d57d515-kube-api-access-nscz6\") pod \"watcher-api-0\" (UID: \"23695adb-c981-437e-b37d-cef86d57d515\") " pod="openstack/watcher-api-0"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.591396 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e187592b-b331-4144-9a27-ba81e79121b6-combined-ca-bundle\") pod \"cinder-db-sync-24k4j\" (UID: \"e187592b-b331-4144-9a27-ba81e79121b6\") " pod="openstack/cinder-db-sync-24k4j"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.591414 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23695adb-c981-437e-b37d-cef86d57d515-config-data\") pod \"watcher-api-0\" (UID: \"23695adb-c981-437e-b37d-cef86d57d515\") " pod="openstack/watcher-api-0"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.591439 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ppxmh\" (UniqueName: \"kubernetes.io/projected/1b372793-eadc-4593-af62-97e7d647c76d-kube-api-access-ppxmh\") pod \"watcher-applier-0\" (UID: \"1b372793-eadc-4593-af62-97e7d647c76d\") " pod="openstack/watcher-applier-0"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.591456 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/23695adb-c981-437e-b37d-cef86d57d515-logs\") pod \"watcher-api-0\" (UID: \"23695adb-c981-437e-b37d-cef86d57d515\") " pod="openstack/watcher-api-0"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.591470 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b372793-eadc-4593-af62-97e7d647c76d-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"1b372793-eadc-4593-af62-97e7d647c76d\") " pod="openstack/watcher-applier-0"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.591493 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e187592b-b331-4144-9a27-ba81e79121b6-scripts\") pod \"cinder-db-sync-24k4j\" (UID: \"e187592b-b331-4144-9a27-ba81e79121b6\") " pod="openstack/cinder-db-sync-24k4j"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.591515 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b372793-eadc-4593-af62-97e7d647c76d-config-data\") pod \"watcher-applier-0\" (UID: \"1b372793-eadc-4593-af62-97e7d647c76d\") " pod="openstack/watcher-applier-0"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.591557 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-knrkq\" (UniqueName: \"kubernetes.io/projected/e187592b-b331-4144-9a27-ba81e79121b6-kube-api-access-knrkq\") pod \"cinder-db-sync-24k4j\" (UID: \"e187592b-b331-4144-9a27-ba81e79121b6\") " pod="openstack/cinder-db-sync-24k4j"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.591580 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1b372793-eadc-4593-af62-97e7d647c76d-logs\") pod \"watcher-applier-0\" (UID: \"1b372793-eadc-4593-af62-97e7d647c76d\") " pod="openstack/watcher-applier-0"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.591603 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e187592b-b331-4144-9a27-ba81e79121b6-config-data\") pod \"cinder-db-sync-24k4j\" (UID: \"e187592b-b331-4144-9a27-ba81e79121b6\") " pod="openstack/cinder-db-sync-24k4j"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.591639 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e187592b-b331-4144-9a27-ba81e79121b6-db-sync-config-data\") pod \"cinder-db-sync-24k4j\" (UID: \"e187592b-b331-4144-9a27-ba81e79121b6\") " pod="openstack/cinder-db-sync-24k4j"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.592578 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/23695adb-c981-437e-b37d-cef86d57d515-logs\") pod \"watcher-api-0\" (UID: \"23695adb-c981-437e-b37d-cef86d57d515\") " pod="openstack/watcher-api-0"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.596802 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1b372793-eadc-4593-af62-97e7d647c76d-logs\") pod \"watcher-applier-0\" (UID: \"1b372793-eadc-4593-af62-97e7d647c76d\") " pod="openstack/watcher-applier-0"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.612143 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-decision-engine-0"]
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.613277 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-decision-engine-0"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.622125 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/23695adb-c981-437e-b37d-cef86d57d515-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"23695adb-c981-437e-b37d-cef86d57d515\") " pod="openstack/watcher-api-0"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.626155 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b372793-eadc-4593-af62-97e7d647c76d-config-data\") pod \"watcher-applier-0\" (UID: \"1b372793-eadc-4593-af62-97e7d647c76d\") " pod="openstack/watcher-applier-0"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.626832 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23695adb-c981-437e-b37d-cef86d57d515-config-data\") pod \"watcher-api-0\" (UID: \"23695adb-c981-437e-b37d-cef86d57d515\") " pod="openstack/watcher-api-0"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.635720 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-decision-engine-config-data"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.638776 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b372793-eadc-4593-af62-97e7d647c76d-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"1b372793-eadc-4593-af62-97e7d647c76d\") " pod="openstack/watcher-applier-0"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.643225 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-vt6gx"]
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.658664 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f34e93a8-02d9-44ef-a18e-13ce24c3f9a6","Type":"ContainerStarted","Data":"0bdb38a3b038b1f99a84c4b2c7649fb7651ee88dda1bc8151f143ae0b44fce16"}
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.658992 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-vt6gx"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.663250 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-vt6gx"]
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.672791 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ppxmh\" (UniqueName: \"kubernetes.io/projected/1b372793-eadc-4593-af62-97e7d647c76d-kube-api-access-ppxmh\") pod \"watcher-applier-0\" (UID: \"1b372793-eadc-4593-af62-97e7d647c76d\") " pod="openstack/watcher-applier-0"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.674779 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.674959 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.675065 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-bpx25"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.694426 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e187592b-b331-4144-9a27-ba81e79121b6-config-data\") pod \"cinder-db-sync-24k4j\" (UID: \"e187592b-b331-4144-9a27-ba81e79121b6\") " pod="openstack/cinder-db-sync-24k4j"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.694471 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/7216e2bb-f775-4b6b-9f34-b966f26f4002-config\") pod \"neutron-db-sync-vt6gx\" (UID: \"7216e2bb-f775-4b6b-9f34-b966f26f4002\") " pod="openstack/neutron-db-sync-vt6gx"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.694527 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5nzbd\" (UniqueName: \"kubernetes.io/projected/7216e2bb-f775-4b6b-9f34-b966f26f4002-kube-api-access-5nzbd\") pod \"neutron-db-sync-vt6gx\" (UID: \"7216e2bb-f775-4b6b-9f34-b966f26f4002\") " pod="openstack/neutron-db-sync-vt6gx"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.694556 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e187592b-b331-4144-9a27-ba81e79121b6-db-sync-config-data\") pod \"cinder-db-sync-24k4j\" (UID: \"e187592b-b331-4144-9a27-ba81e79121b6\") " pod="openstack/cinder-db-sync-24k4j"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.694605 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e187592b-b331-4144-9a27-ba81e79121b6-etc-machine-id\") pod \"cinder-db-sync-24k4j\" (UID: \"e187592b-b331-4144-9a27-ba81e79121b6\") " pod="openstack/cinder-db-sync-24k4j"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.694632 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31a07479-cab0-4561-b49b-73b1c3dad744-config-data\") pod \"watcher-decision-engine-0\" (UID: \"31a07479-cab0-4561-b49b-73b1c3dad744\") " pod="openstack/watcher-decision-engine-0"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.694664 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e187592b-b331-4144-9a27-ba81e79121b6-combined-ca-bundle\") pod \"cinder-db-sync-24k4j\" (UID: \"e187592b-b331-4144-9a27-ba81e79121b6\") " pod="openstack/cinder-db-sync-24k4j"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.694683 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/31a07479-cab0-4561-b49b-73b1c3dad744-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"31a07479-cab0-4561-b49b-73b1c3dad744\") " pod="openstack/watcher-decision-engine-0"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.694713 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e187592b-b331-4144-9a27-ba81e79121b6-scripts\") pod \"cinder-db-sync-24k4j\" (UID: \"e187592b-b331-4144-9a27-ba81e79121b6\") " pod="openstack/cinder-db-sync-24k4j"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.694747 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4xmx9\" (UniqueName: \"kubernetes.io/projected/31a07479-cab0-4561-b49b-73b1c3dad744-kube-api-access-4xmx9\") pod \"watcher-decision-engine-0\" (UID: \"31a07479-cab0-4561-b49b-73b1c3dad744\") " pod="openstack/watcher-decision-engine-0"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.694766 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31a07479-cab0-4561-b49b-73b1c3dad744-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"31a07479-cab0-4561-b49b-73b1c3dad744\") " pod="openstack/watcher-decision-engine-0"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.694801 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7216e2bb-f775-4b6b-9f34-b966f26f4002-combined-ca-bundle\") pod \"neutron-db-sync-vt6gx\" (UID: \"7216e2bb-f775-4b6b-9f34-b966f26f4002\") " pod="openstack/neutron-db-sync-vt6gx"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.694824 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/31a07479-cab0-4561-b49b-73b1c3dad744-logs\") pod \"watcher-decision-engine-0\" (UID: \"31a07479-cab0-4561-b49b-73b1c3dad744\") " pod="openstack/watcher-decision-engine-0"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.694847 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-knrkq\" (UniqueName: \"kubernetes.io/projected/e187592b-b331-4144-9a27-ba81e79121b6-kube-api-access-knrkq\") pod \"cinder-db-sync-24k4j\" (UID: \"e187592b-b331-4144-9a27-ba81e79121b6\") " pod="openstack/cinder-db-sync-24k4j"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.700293 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e187592b-b331-4144-9a27-ba81e79121b6-etc-machine-id\") pod \"cinder-db-sync-24k4j\" (UID: \"e187592b-b331-4144-9a27-ba81e79121b6\") " pod="openstack/cinder-db-sync-24k4j"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.702959 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e187592b-b331-4144-9a27-ba81e79121b6-config-data\") pod \"cinder-db-sync-24k4j\" (UID: \"e187592b-b331-4144-9a27-ba81e79121b6\") " pod="openstack/cinder-db-sync-24k4j"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.711684 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e187592b-b331-4144-9a27-ba81e79121b6-db-sync-config-data\") pod \"cinder-db-sync-24k4j\" (UID: \"e187592b-b331-4144-9a27-ba81e79121b6\") " pod="openstack/cinder-db-sync-24k4j"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.711862 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e187592b-b331-4144-9a27-ba81e79121b6-combined-ca-bundle\") pod \"cinder-db-sync-24k4j\" (UID: \"e187592b-b331-4144-9a27-ba81e79121b6\") " pod="openstack/cinder-db-sync-24k4j"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.714803 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e187592b-b331-4144-9a27-ba81e79121b6-scripts\") pod \"cinder-db-sync-24k4j\" (UID: \"e187592b-b331-4144-9a27-ba81e79121b6\") " pod="openstack/cinder-db-sync-24k4j"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.730251 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23695adb-c981-437e-b37d-cef86d57d515-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"23695adb-c981-437e-b37d-cef86d57d515\") " pod="openstack/watcher-api-0"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.750780 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nscz6\" (UniqueName: \"kubernetes.io/projected/23695adb-c981-437e-b37d-cef86d57d515-kube-api-access-nscz6\") pod \"watcher-api-0\" (UID: \"23695adb-c981-437e-b37d-cef86d57d515\") " pod="openstack/watcher-api-0"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.759832 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-knrkq\" (UniqueName: \"kubernetes.io/projected/e187592b-b331-4144-9a27-ba81e79121b6-kube-api-access-knrkq\") pod \"cinder-db-sync-24k4j\" (UID: \"e187592b-b331-4144-9a27-ba81e79121b6\") " pod="openstack/cinder-db-sync-24k4j"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.770866 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-57f74fb67c-6g957"]
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.772262 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-57f74fb67c-6g957"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.777042 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.778490 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.783245 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-ljvqg"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.791565 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.798492 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5nzbd\" (UniqueName: \"kubernetes.io/projected/7216e2bb-f775-4b6b-9f34-b966f26f4002-kube-api-access-5nzbd\") pod \"neutron-db-sync-vt6gx\" (UID: \"7216e2bb-f775-4b6b-9f34-b966f26f4002\") " pod="openstack/neutron-db-sync-vt6gx"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.798556 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31a07479-cab0-4561-b49b-73b1c3dad744-config-data\") pod \"watcher-decision-engine-0\" (UID: \"31a07479-cab0-4561-b49b-73b1c3dad744\") " pod="openstack/watcher-decision-engine-0"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.798589 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/31a07479-cab0-4561-b49b-73b1c3dad744-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"31a07479-cab0-4561-b49b-73b1c3dad744\") " pod="openstack/watcher-decision-engine-0"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.798636 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4xmx9\" (UniqueName: \"kubernetes.io/projected/31a07479-cab0-4561-b49b-73b1c3dad744-kube-api-access-4xmx9\") pod \"watcher-decision-engine-0\" (UID: \"31a07479-cab0-4561-b49b-73b1c3dad744\") " pod="openstack/watcher-decision-engine-0"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.798652 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31a07479-cab0-4561-b49b-73b1c3dad744-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"31a07479-cab0-4561-b49b-73b1c3dad744\") " pod="openstack/watcher-decision-engine-0"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.798669 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7216e2bb-f775-4b6b-9f34-b966f26f4002-combined-ca-bundle\") pod \"neutron-db-sync-vt6gx\" (UID: \"7216e2bb-f775-4b6b-9f34-b966f26f4002\") " pod="openstack/neutron-db-sync-vt6gx"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.798702 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/31a07479-cab0-4561-b49b-73b1c3dad744-logs\") pod \"watcher-decision-engine-0\" (UID: \"31a07479-cab0-4561-b49b-73b1c3dad744\") " pod="openstack/watcher-decision-engine-0"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.798740 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/7216e2bb-f775-4b6b-9f34-b966f26f4002-config\") pod \"neutron-db-sync-vt6gx\" (UID: \"7216e2bb-f775-4b6b-9f34-b966f26f4002\") " pod="openstack/neutron-db-sync-vt6gx"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.811243 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-decision-engine-0"]
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.821720 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/31a07479-cab0-4561-b49b-73b1c3dad744-logs\") pod \"watcher-decision-engine-0\" (UID: \"31a07479-cab0-4561-b49b-73b1c3dad744\") " pod="openstack/watcher-decision-engine-0"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.848971 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5nzbd\" (UniqueName: \"kubernetes.io/projected/7216e2bb-f775-4b6b-9f34-b966f26f4002-kube-api-access-5nzbd\") pod \"neutron-db-sync-vt6gx\" (UID: \"7216e2bb-f775-4b6b-9f34-b966f26f4002\") " pod="openstack/neutron-db-sync-vt6gx"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.870230 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31a07479-cab0-4561-b49b-73b1c3dad744-config-data\") pod \"watcher-decision-engine-0\" (UID: \"31a07479-cab0-4561-b49b-73b1c3dad744\") " pod="openstack/watcher-decision-engine-0"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.878530 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-57f74fb67c-6g957"]
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.882029 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4xmx9\" (UniqueName: \"kubernetes.io/projected/31a07479-cab0-4561-b49b-73b1c3dad744-kube-api-access-4xmx9\") pod \"watcher-decision-engine-0\" (UID: \"31a07479-cab0-4561-b49b-73b1c3dad744\") " pod="openstack/watcher-decision-engine-0"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.900081 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/23e368b2-6047-40b1-afd5-7899c48c94ad-logs\") pod \"horizon-57f74fb67c-6g957\" (UID: \"23e368b2-6047-40b1-afd5-7899c48c94ad\") " pod="openstack/horizon-57f74fb67c-6g957"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.900147 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/23e368b2-6047-40b1-afd5-7899c48c94ad-config-data\") pod \"horizon-57f74fb67c-6g957\" (UID: \"23e368b2-6047-40b1-afd5-7899c48c94ad\") " pod="openstack/horizon-57f74fb67c-6g957"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.900213 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4mkqw\" (UniqueName: \"kubernetes.io/projected/23e368b2-6047-40b1-afd5-7899c48c94ad-kube-api-access-4mkqw\") pod \"horizon-57f74fb67c-6g957\" (UID: \"23e368b2-6047-40b1-afd5-7899c48c94ad\") " pod="openstack/horizon-57f74fb67c-6g957"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.900233 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/23e368b2-6047-40b1-afd5-7899c48c94ad-horizon-secret-key\") pod \"horizon-57f74fb67c-6g957\" (UID: \"23e368b2-6047-40b1-afd5-7899c48c94ad\") " pod="openstack/horizon-57f74fb67c-6g957"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.900310 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/23e368b2-6047-40b1-afd5-7899c48c94ad-scripts\") pod \"horizon-57f74fb67c-6g957\" (UID: \"23e368b2-6047-40b1-afd5-7899c48c94ad\") " pod="openstack/horizon-57f74fb67c-6g957"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.903439 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/31a07479-cab0-4561-b49b-73b1c3dad744-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"31a07479-cab0-4561-b49b-73b1c3dad744\") " pod="openstack/watcher-decision-engine-0"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.905346 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7216e2bb-f775-4b6b-9f34-b966f26f4002-combined-ca-bundle\") pod \"neutron-db-sync-vt6gx\" (UID: \"7216e2bb-f775-4b6b-9f34-b966f26f4002\") " pod="openstack/neutron-db-sync-vt6gx"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.920483 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31a07479-cab0-4561-b49b-73b1c3dad744-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"31a07479-cab0-4561-b49b-73b1c3dad744\") " pod="openstack/watcher-decision-engine-0"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.948048 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/7216e2bb-f775-4b6b-9f34-b966f26f4002-config\") pod \"neutron-db-sync-vt6gx\" (UID: \"7216e2bb-f775-4b6b-9f34-b966f26f4002\") " pod="openstack/neutron-db-sync-vt6gx"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.963804 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-applier-0"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.972299 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.974406 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.980569 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.980767 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Dec 05 12:45:29 crc kubenswrapper[4784]: I1205 12:45:29.990289 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-api-0"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.003709 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/23e368b2-6047-40b1-afd5-7899c48c94ad-scripts\") pod \"horizon-57f74fb67c-6g957\" (UID: \"23e368b2-6047-40b1-afd5-7899c48c94ad\") " pod="openstack/horizon-57f74fb67c-6g957"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.003789 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee259e74-24f6-4a39-b3d9-3bd926ace782-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ee259e74-24f6-4a39-b3d9-3bd926ace782\") " pod="openstack/ceilometer-0"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.003816 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/23e368b2-6047-40b1-afd5-7899c48c94ad-logs\") pod \"horizon-57f74fb67c-6g957\" (UID: \"23e368b2-6047-40b1-afd5-7899c48c94ad\") " pod="openstack/horizon-57f74fb67c-6g957"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.003865 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee259e74-24f6-4a39-b3d9-3bd926ace782-config-data\") pod \"ceilometer-0\" (UID: \"ee259e74-24f6-4a39-b3d9-3bd926ace782\") " pod="openstack/ceilometer-0"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.003910 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/23e368b2-6047-40b1-afd5-7899c48c94ad-config-data\") pod \"horizon-57f74fb67c-6g957\" (UID: \"23e368b2-6047-40b1-afd5-7899c48c94ad\") " pod="openstack/horizon-57f74fb67c-6g957"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.003937 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ee259e74-24f6-4a39-b3d9-3bd926ace782-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ee259e74-24f6-4a39-b3d9-3bd926ace782\") " pod="openstack/ceilometer-0"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.003961 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m455j\" (UniqueName: \"kubernetes.io/projected/ee259e74-24f6-4a39-b3d9-3bd926ace782-kube-api-access-m455j\") pod \"ceilometer-0\" (UID: \"ee259e74-24f6-4a39-b3d9-3bd926ace782\") " pod="openstack/ceilometer-0"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.004006 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4mkqw\" (UniqueName: \"kubernetes.io/projected/23e368b2-6047-40b1-afd5-7899c48c94ad-kube-api-access-4mkqw\") pod \"horizon-57f74fb67c-6g957\" (UID: \"23e368b2-6047-40b1-afd5-7899c48c94ad\") " pod="openstack/horizon-57f74fb67c-6g957"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.004086 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/23e368b2-6047-40b1-afd5-7899c48c94ad-horizon-secret-key\") pod \"horizon-57f74fb67c-6g957\" (UID: \"23e368b2-6047-40b1-afd5-7899c48c94ad\") " pod="openstack/horizon-57f74fb67c-6g957"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.004116 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ee259e74-24f6-4a39-b3d9-3bd926ace782-log-httpd\") pod \"ceilometer-0\" (UID: \"ee259e74-24f6-4a39-b3d9-3bd926ace782\") " pod="openstack/ceilometer-0"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.004153 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ee259e74-24f6-4a39-b3d9-3bd926ace782-run-httpd\") pod \"ceilometer-0\" (UID: \"ee259e74-24f6-4a39-b3d9-3bd926ace782\") " pod="openstack/ceilometer-0"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.004175 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee259e74-24f6-4a39-b3d9-3bd926ace782-scripts\") pod \"ceilometer-0\" (UID: \"ee259e74-24f6-4a39-b3d9-3bd926ace782\") " pod="openstack/ceilometer-0"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.017670 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/23e368b2-6047-40b1-afd5-7899c48c94ad-scripts\") pod \"horizon-57f74fb67c-6g957\" (UID: \"23e368b2-6047-40b1-afd5-7899c48c94ad\") " pod="openstack/horizon-57f74fb67c-6g957"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.017921 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/23e368b2-6047-40b1-afd5-7899c48c94ad-logs\") pod \"horizon-57f74fb67c-6g957\" (UID: \"23e368b2-6047-40b1-afd5-7899c48c94ad\") " pod="openstack/horizon-57f74fb67c-6g957"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.018794 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/23e368b2-6047-40b1-afd5-7899c48c94ad-config-data\") pod \"horizon-57f74fb67c-6g957\" (UID: \"23e368b2-6047-40b1-afd5-7899c48c94ad\") " pod="openstack/horizon-57f74fb67c-6g957"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.019749 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-24k4j"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.032736 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-bwtqt"]
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.049795 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-bwtqt"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.052891 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.054670 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-nndgt"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.058541 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-decision-engine-0"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.060815 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/23e368b2-6047-40b1-afd5-7899c48c94ad-horizon-secret-key\") pod \"horizon-57f74fb67c-6g957\" (UID: \"23e368b2-6047-40b1-afd5-7899c48c94ad\") " pod="openstack/horizon-57f74fb67c-6g957"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.078292 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-vt6gx"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.080040 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-bwtqt"]
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.109519 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4mkqw\" (UniqueName: \"kubernetes.io/projected/23e368b2-6047-40b1-afd5-7899c48c94ad-kube-api-access-4mkqw\") pod \"horizon-57f74fb67c-6g957\" (UID: \"23e368b2-6047-40b1-afd5-7899c48c94ad\") " pod="openstack/horizon-57f74fb67c-6g957"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.142572 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.145211 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c921ceb1-e577-4b4a-be99-3544491930d3-db-sync-config-data\") pod \"barbican-db-sync-bwtqt\" (UID: \"c921ceb1-e577-4b4a-be99-3544491930d3\") " pod="openstack/barbican-db-sync-bwtqt"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.147023 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee259e74-24f6-4a39-b3d9-3bd926ace782-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ee259e74-24f6-4a39-b3d9-3bd926ace782\") " pod="openstack/ceilometer-0"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.147073 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9h8j\" (UniqueName: \"kubernetes.io/projected/c921ceb1-e577-4b4a-be99-3544491930d3-kube-api-access-x9h8j\") pod \"barbican-db-sync-bwtqt\" (UID: \"c921ceb1-e577-4b4a-be99-3544491930d3\") " pod="openstack/barbican-db-sync-bwtqt"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.147438 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee259e74-24f6-4a39-b3d9-3bd926ace782-config-data\") pod \"ceilometer-0\" (UID: \"ee259e74-24f6-4a39-b3d9-3bd926ace782\") " pod="openstack/ceilometer-0"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.147693 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ee259e74-24f6-4a39-b3d9-3bd926ace782-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ee259e74-24f6-4a39-b3d9-3bd926ace782\") " pod="openstack/ceilometer-0"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.149104 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m455j\" (UniqueName: \"kubernetes.io/projected/ee259e74-24f6-4a39-b3d9-3bd926ace782-kube-api-access-m455j\") pod \"ceilometer-0\" (UID: \"ee259e74-24f6-4a39-b3d9-3bd926ace782\") " pod="openstack/ceilometer-0"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.149714 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ee259e74-24f6-4a39-b3d9-3bd926ace782-log-httpd\") pod \"ceilometer-0\" (UID: \"ee259e74-24f6-4a39-b3d9-3bd926ace782\") " pod="openstack/ceilometer-0"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.150176 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c921ceb1-e577-4b4a-be99-3544491930d3-combined-ca-bundle\") pod \"barbican-db-sync-bwtqt\" (UID: \"c921ceb1-e577-4b4a-be99-3544491930d3\") " pod="openstack/barbican-db-sync-bwtqt"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.150605 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ee259e74-24f6-4a39-b3d9-3bd926ace782-run-httpd\") pod \"ceilometer-0\" (UID: \"ee259e74-24f6-4a39-b3d9-3bd926ace782\") " pod="openstack/ceilometer-0"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.150866 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee259e74-24f6-4a39-b3d9-3bd926ace782-scripts\") pod \"ceilometer-0\" (UID: \"ee259e74-24f6-4a39-b3d9-3bd926ace782\") " pod="openstack/ceilometer-0"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.173859 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ee259e74-24f6-4a39-b3d9-3bd926ace782-run-httpd\") pod \"ceilometer-0\" (UID: \"ee259e74-24f6-4a39-b3d9-3bd926ace782\") " pod="openstack/ceilometer-0"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.211256 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ee259e74-24f6-4a39-b3d9-3bd926ace782-log-httpd\") pod \"ceilometer-0\" (UID: \"ee259e74-24f6-4a39-b3d9-3bd926ace782\") " pod="openstack/ceilometer-0"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.211798 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-57f74fb67c-6g957"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.214385 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee259e74-24f6-4a39-b3d9-3bd926ace782-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ee259e74-24f6-4a39-b3d9-3bd926ace782\") " pod="openstack/ceilometer-0"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.281340 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c921ceb1-e577-4b4a-be99-3544491930d3-db-sync-config-data\") pod \"barbican-db-sync-bwtqt\" (UID: \"c921ceb1-e577-4b4a-be99-3544491930d3\") " pod="openstack/barbican-db-sync-bwtqt"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.281740 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9h8j\" (UniqueName: \"kubernetes.io/projected/c921ceb1-e577-4b4a-be99-3544491930d3-kube-api-access-x9h8j\") pod \"barbican-db-sync-bwtqt\" (UID: \"c921ceb1-e577-4b4a-be99-3544491930d3\") " pod="openstack/barbican-db-sync-bwtqt"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.281846 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c921ceb1-e577-4b4a-be99-3544491930d3-combined-ca-bundle\") pod \"barbican-db-sync-bwtqt\" (UID: \"c921ceb1-e577-4b4a-be99-3544491930d3\") " pod="openstack/barbican-db-sync-bwtqt"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.292384 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-zx82c"]
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.293617 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-zx82c"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.297481 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.297527 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.297483 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-9l6d5"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.363690 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f486db75f-kwgvh"]
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.370153 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-zx82c"]
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.377799 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-74474b4965-5h5n8"]
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.380209 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-74474b4965-5h5n8"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.383875 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/851aaea1-2d18-4f91-b410-5fdb0a7f42ec-config-data\") pod \"placement-db-sync-zx82c\" (UID: \"851aaea1-2d18-4f91-b410-5fdb0a7f42ec\") " pod="openstack/placement-db-sync-zx82c"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.383934 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gw99n\" (UniqueName: \"kubernetes.io/projected/851aaea1-2d18-4f91-b410-5fdb0a7f42ec-kube-api-access-gw99n\") pod \"placement-db-sync-zx82c\" (UID: \"851aaea1-2d18-4f91-b410-5fdb0a7f42ec\") " pod="openstack/placement-db-sync-zx82c"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.384010 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/851aaea1-2d18-4f91-b410-5fdb0a7f42ec-combined-ca-bundle\") pod \"placement-db-sync-zx82c\" (UID: \"851aaea1-2d18-4f91-b410-5fdb0a7f42ec\") " pod="openstack/placement-db-sync-zx82c"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.384041 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/851aaea1-2d18-4f91-b410-5fdb0a7f42ec-logs\") pod \"placement-db-sync-zx82c\" (UID: \"851aaea1-2d18-4f91-b410-5fdb0a7f42ec\") " pod="openstack/placement-db-sync-zx82c"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.384074 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/851aaea1-2d18-4f91-b410-5fdb0a7f42ec-scripts\") pod \"placement-db-sync-zx82c\" (UID: \"851aaea1-2d18-4f91-b410-5fdb0a7f42ec\") " pod="openstack/placement-db-sync-zx82c"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.384444 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-74474b4965-5h5n8"]
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.394501 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee259e74-24f6-4a39-b3d9-3bd926ace782-scripts\") pod \"ceilometer-0\" (UID: \"ee259e74-24f6-4a39-b3d9-3bd926ace782\") " pod="openstack/ceilometer-0"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.394527 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m455j\" (UniqueName: \"kubernetes.io/projected/ee259e74-24f6-4a39-b3d9-3bd926ace782-kube-api-access-m455j\") pod \"ceilometer-0\" (UID: \"ee259e74-24f6-4a39-b3d9-3bd926ace782\") " pod="openstack/ceilometer-0"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.396525 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee259e74-24f6-4a39-b3d9-3bd926ace782-config-data\") pod \"ceilometer-0\" (UID: \"ee259e74-24f6-4a39-b3d9-3bd926ace782\") " pod="openstack/ceilometer-0"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.399162 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ee259e74-24f6-4a39-b3d9-3bd926ace782-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ee259e74-24f6-4a39-b3d9-3bd926ace782\") " pod="openstack/ceilometer-0"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.401601 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c921ceb1-e577-4b4a-be99-3544491930d3-combined-ca-bundle\") pod \"barbican-db-sync-bwtqt\" (UID: \"c921ceb1-e577-4b4a-be99-3544491930d3\") " pod="openstack/barbican-db-sync-bwtqt"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.405362 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6f76dd6ddf-4shbw"]
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.407492 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c921ceb1-e577-4b4a-be99-3544491930d3-db-sync-config-data\") pod \"barbican-db-sync-bwtqt\" (UID: \"c921ceb1-e577-4b4a-be99-3544491930d3\") " pod="openstack/barbican-db-sync-bwtqt"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.408106 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9h8j\" (UniqueName: \"kubernetes.io/projected/c921ceb1-e577-4b4a-be99-3544491930d3-kube-api-access-x9h8j\") pod \"barbican-db-sync-bwtqt\" (UID: \"c921ceb1-e577-4b4a-be99-3544491930d3\") " pod="openstack/barbican-db-sync-bwtqt"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.408882 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6f76dd6ddf-4shbw"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.427820 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6f76dd6ddf-4shbw"]
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.486510 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/851aaea1-2d18-4f91-b410-5fdb0a7f42ec-combined-ca-bundle\") pod \"placement-db-sync-zx82c\" (UID: \"851aaea1-2d18-4f91-b410-5fdb0a7f42ec\") " pod="openstack/placement-db-sync-zx82c"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.486581 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/851aaea1-2d18-4f91-b410-5fdb0a7f42ec-logs\") pod \"placement-db-sync-zx82c\" (UID: \"851aaea1-2d18-4f91-b410-5fdb0a7f42ec\") " pod="openstack/placement-db-sync-zx82c"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.486609 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-68df4\" (UniqueName: \"kubernetes.io/projected/8c250fed-82c3-44b8-bf7f-b6ef3a2965a1-kube-api-access-68df4\") pod \"dnsmasq-dns-6f76dd6ddf-4shbw\" (UID: \"8c250fed-82c3-44b8-bf7f-b6ef3a2965a1\") " pod="openstack/dnsmasq-dns-6f76dd6ddf-4shbw"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.486647 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/851aaea1-2d18-4f91-b410-5fdb0a7f42ec-scripts\") pod \"placement-db-sync-zx82c\" (UID: \"851aaea1-2d18-4f91-b410-5fdb0a7f42ec\") " pod="openstack/placement-db-sync-zx82c"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.486665 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8c250fed-82c3-44b8-bf7f-b6ef3a2965a1-ovsdbserver-nb\") pod \"dnsmasq-dns-6f76dd6ddf-4shbw\" (UID: \"8c250fed-82c3-44b8-bf7f-b6ef3a2965a1\") " pod="openstack/dnsmasq-dns-6f76dd6ddf-4shbw"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.486700 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5d78607c-ec1e-429a-b458-217f7de86abc-config-data\") pod \"horizon-74474b4965-5h5n8\" (UID: \"5d78607c-ec1e-429a-b458-217f7de86abc\") " pod="openstack/horizon-74474b4965-5h5n8"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.486726 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5d78607c-ec1e-429a-b458-217f7de86abc-horizon-secret-key\") pod \"horizon-74474b4965-5h5n8\" (UID: \"5d78607c-ec1e-429a-b458-217f7de86abc\") " pod="openstack/horizon-74474b4965-5h5n8"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.486754 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6xkcl\" (UniqueName: \"kubernetes.io/projected/5d78607c-ec1e-429a-b458-217f7de86abc-kube-api-access-6xkcl\") pod \"horizon-74474b4965-5h5n8\" (UID: \"5d78607c-ec1e-429a-b458-217f7de86abc\") " pod="openstack/horizon-74474b4965-5h5n8"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.486772 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/851aaea1-2d18-4f91-b410-5fdb0a7f42ec-config-data\") pod \"placement-db-sync-zx82c\" (UID: \"851aaea1-2d18-4f91-b410-5fdb0a7f42ec\") " pod="openstack/placement-db-sync-zx82c"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.486806 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5d78607c-ec1e-429a-b458-217f7de86abc-scripts\") pod \"horizon-74474b4965-5h5n8\" (UID: \"5d78607c-ec1e-429a-b458-217f7de86abc\") " pod="openstack/horizon-74474b4965-5h5n8"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.486831 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gw99n\" (UniqueName: \"kubernetes.io/projected/851aaea1-2d18-4f91-b410-5fdb0a7f42ec-kube-api-access-gw99n\") pod \"placement-db-sync-zx82c\" (UID: \"851aaea1-2d18-4f91-b410-5fdb0a7f42ec\") " pod="openstack/placement-db-sync-zx82c"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.486867 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5d78607c-ec1e-429a-b458-217f7de86abc-logs\") pod \"horizon-74474b4965-5h5n8\" (UID: \"5d78607c-ec1e-429a-b458-217f7de86abc\") " pod="openstack/horizon-74474b4965-5h5n8"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.486892 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c250fed-82c3-44b8-bf7f-b6ef3a2965a1-config\") pod \"dnsmasq-dns-6f76dd6ddf-4shbw\" (UID: \"8c250fed-82c3-44b8-bf7f-b6ef3a2965a1\") " pod="openstack/dnsmasq-dns-6f76dd6ddf-4shbw"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.486920 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8c250fed-82c3-44b8-bf7f-b6ef3a2965a1-ovsdbserver-sb\") pod \"dnsmasq-dns-6f76dd6ddf-4shbw\" (UID: \"8c250fed-82c3-44b8-bf7f-b6ef3a2965a1\") " pod="openstack/dnsmasq-dns-6f76dd6ddf-4shbw"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.486937 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8c250fed-82c3-44b8-bf7f-b6ef3a2965a1-dns-svc\") pod \"dnsmasq-dns-6f76dd6ddf-4shbw\" (UID: \"8c250fed-82c3-44b8-bf7f-b6ef3a2965a1\") " pod="openstack/dnsmasq-dns-6f76dd6ddf-4shbw"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.494742 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/851aaea1-2d18-4f91-b410-5fdb0a7f42ec-logs\") pod \"placement-db-sync-zx82c\" (UID: \"851aaea1-2d18-4f91-b410-5fdb0a7f42ec\") " pod="openstack/placement-db-sync-zx82c"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.507493 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f486db75f-kwgvh"]
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.509308 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/851aaea1-2d18-4f91-b410-5fdb0a7f42ec-scripts\") pod \"placement-db-sync-zx82c\" (UID: \"851aaea1-2d18-4f91-b410-5fdb0a7f42ec\") " pod="openstack/placement-db-sync-zx82c"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.512061 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.514300 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/851aaea1-2d18-4f91-b410-5fdb0a7f42ec-combined-ca-bundle\") pod \"placement-db-sync-zx82c\" (UID: \"851aaea1-2d18-4f91-b410-5fdb0a7f42ec\") " pod="openstack/placement-db-sync-zx82c"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.518114 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gw99n\" (UniqueName: \"kubernetes.io/projected/851aaea1-2d18-4f91-b410-5fdb0a7f42ec-kube-api-access-gw99n\") pod \"placement-db-sync-zx82c\" (UID: \"851aaea1-2d18-4f91-b410-5fdb0a7f42ec\") " pod="openstack/placement-db-sync-zx82c"
Dec 05 12:45:30 crc kubenswrapper[4784]: I1205 12:45:30.533070 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-bwtqt"
Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:30.588370 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5d78607c-ec1e-429a-b458-217f7de86abc-config-data\") pod \"horizon-74474b4965-5h5n8\" (UID: \"5d78607c-ec1e-429a-b458-217f7de86abc\") " pod="openstack/horizon-74474b4965-5h5n8"
Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:30.588436 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5d78607c-ec1e-429a-b458-217f7de86abc-horizon-secret-key\") pod \"horizon-74474b4965-5h5n8\" (UID: \"5d78607c-ec1e-429a-b458-217f7de86abc\") " pod="openstack/horizon-74474b4965-5h5n8"
Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:30.588475 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6xkcl\" (UniqueName: \"kubernetes.io/projected/5d78607c-ec1e-429a-b458-217f7de86abc-kube-api-access-6xkcl\") pod \"horizon-74474b4965-5h5n8\" (UID: \"5d78607c-ec1e-429a-b458-217f7de86abc\") " pod="openstack/horizon-74474b4965-5h5n8"
Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:30.588529 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5d78607c-ec1e-429a-b458-217f7de86abc-scripts\") pod \"horizon-74474b4965-5h5n8\" (UID: \"5d78607c-ec1e-429a-b458-217f7de86abc\") " pod="openstack/horizon-74474b4965-5h5n8"
Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:30.588584 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5d78607c-ec1e-429a-b458-217f7de86abc-logs\") pod \"horizon-74474b4965-5h5n8\" (UID: \"5d78607c-ec1e-429a-b458-217f7de86abc\") " pod="openstack/horizon-74474b4965-5h5n8"
Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:30.588616 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c250fed-82c3-44b8-bf7f-b6ef3a2965a1-config\") pod \"dnsmasq-dns-6f76dd6ddf-4shbw\" (UID: \"8c250fed-82c3-44b8-bf7f-b6ef3a2965a1\") " pod="openstack/dnsmasq-dns-6f76dd6ddf-4shbw"
Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:30.588651 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8c250fed-82c3-44b8-bf7f-b6ef3a2965a1-ovsdbserver-sb\") pod \"dnsmasq-dns-6f76dd6ddf-4shbw\" (UID: \"8c250fed-82c3-44b8-bf7f-b6ef3a2965a1\") " pod="openstack/dnsmasq-dns-6f76dd6ddf-4shbw"
Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:30.588691 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8c250fed-82c3-44b8-bf7f-b6ef3a2965a1-dns-svc\") pod \"dnsmasq-dns-6f76dd6ddf-4shbw\" (UID: \"8c250fed-82c3-44b8-bf7f-b6ef3a2965a1\") " pod="openstack/dnsmasq-dns-6f76dd6ddf-4shbw"
Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:30.588743 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-68df4\" (UniqueName: \"kubernetes.io/projected/8c250fed-82c3-44b8-bf7f-b6ef3a2965a1-kube-api-access-68df4\") pod \"dnsmasq-dns-6f76dd6ddf-4shbw\" (UID: \"8c250fed-82c3-44b8-bf7f-b6ef3a2965a1\") " pod="openstack/dnsmasq-dns-6f76dd6ddf-4shbw"
Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:30.588786 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8c250fed-82c3-44b8-bf7f-b6ef3a2965a1-ovsdbserver-nb\") pod \"dnsmasq-dns-6f76dd6ddf-4shbw\" (UID: \"8c250fed-82c3-44b8-bf7f-b6ef3a2965a1\") " pod="openstack/dnsmasq-dns-6f76dd6ddf-4shbw"
Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:30.590011 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8c250fed-82c3-44b8-bf7f-b6ef3a2965a1-ovsdbserver-nb\") pod \"dnsmasq-dns-6f76dd6ddf-4shbw\" (UID: \"8c250fed-82c3-44b8-bf7f-b6ef3a2965a1\") " pod="openstack/dnsmasq-dns-6f76dd6ddf-4shbw"
Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:30.590681 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5d78607c-ec1e-429a-b458-217f7de86abc-config-data\") pod \"horizon-74474b4965-5h5n8\" (UID: \"5d78607c-ec1e-429a-b458-217f7de86abc\") " pod="openstack/horizon-74474b4965-5h5n8"
Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:30.590797 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8c250fed-82c3-44b8-bf7f-b6ef3a2965a1-ovsdbserver-sb\") pod \"dnsmasq-dns-6f76dd6ddf-4shbw\" (UID: \"8c250fed-82c3-44b8-bf7f-b6ef3a2965a1\") " pod="openstack/dnsmasq-dns-6f76dd6ddf-4shbw"
Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:30.591291 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c250fed-82c3-44b8-bf7f-b6ef3a2965a1-config\") pod \"dnsmasq-dns-6f76dd6ddf-4shbw\" (UID: \"8c250fed-82c3-44b8-bf7f-b6ef3a2965a1\") " pod="openstack/dnsmasq-dns-6f76dd6ddf-4shbw"
Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:30.591817 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5d78607c-ec1e-429a-b458-217f7de86abc-scripts\") pod \"horizon-74474b4965-5h5n8\" (UID: \"5d78607c-ec1e-429a-b458-217f7de86abc\") " pod="openstack/horizon-74474b4965-5h5n8"
Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:30.592303 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8c250fed-82c3-44b8-bf7f-b6ef3a2965a1-dns-svc\") pod \"dnsmasq-dns-6f76dd6ddf-4shbw\" (UID: \"8c250fed-82c3-44b8-bf7f-b6ef3a2965a1\") " pod="openstack/dnsmasq-dns-6f76dd6ddf-4shbw"
Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:30.607891 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5d78607c-ec1e-429a-b458-217f7de86abc-logs\") pod \"horizon-74474b4965-5h5n8\" (UID: \"5d78607c-ec1e-429a-b458-217f7de86abc\") " pod="openstack/horizon-74474b4965-5h5n8"
Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:30.608904 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5d78607c-ec1e-429a-b458-217f7de86abc-horizon-secret-key\") pod \"horizon-74474b4965-5h5n8\" (UID: \"5d78607c-ec1e-429a-b458-217f7de86abc\") " pod="openstack/horizon-74474b4965-5h5n8"
Dec 05 12:45:32 crc kubenswrapper[4784]: W1205 12:45:30.613031 4784 manager.go:1169] Failed to process watch event {EventType:0
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda0516148_87e4_4900_8ad9_e4d81d6599e4.slice/crio-b3e75db664bb18b3a448c3e494a325b46e129245280bedce5b994ca03752f09b WatchSource:0}: Error finding container b3e75db664bb18b3a448c3e494a325b46e129245280bedce5b994ca03752f09b: Status 404 returned error can't find the container with id b3e75db664bb18b3a448c3e494a325b46e129245280bedce5b994ca03752f09b Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:30.614532 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/851aaea1-2d18-4f91-b410-5fdb0a7f42ec-config-data\") pod \"placement-db-sync-zx82c\" (UID: \"851aaea1-2d18-4f91-b410-5fdb0a7f42ec\") " pod="openstack/placement-db-sync-zx82c" Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:30.620678 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6xkcl\" (UniqueName: \"kubernetes.io/projected/5d78607c-ec1e-429a-b458-217f7de86abc-kube-api-access-6xkcl\") pod \"horizon-74474b4965-5h5n8\" (UID: \"5d78607c-ec1e-429a-b458-217f7de86abc\") " pod="openstack/horizon-74474b4965-5h5n8" Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:30.645882 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-68df4\" (UniqueName: \"kubernetes.io/projected/8c250fed-82c3-44b8-bf7f-b6ef3a2965a1-kube-api-access-68df4\") pod \"dnsmasq-dns-6f76dd6ddf-4shbw\" (UID: \"8c250fed-82c3-44b8-bf7f-b6ef3a2965a1\") " pod="openstack/dnsmasq-dns-6f76dd6ddf-4shbw" Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:30.652052 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-zx82c" Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:30.681439 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-pblr2"] Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:30.704247 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-74474b4965-5h5n8" Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:30.720852 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-applier-0"] Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:30.756032 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-24k4j"] Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:30.758240 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6f76dd6ddf-4shbw" Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:30.776643 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f486db75f-kwgvh" event={"ID":"a0516148-87e4-4900-8ad9-e4d81d6599e4","Type":"ContainerStarted","Data":"b3e75db664bb18b3a448c3e494a325b46e129245280bedce5b994ca03752f09b"} Dec 05 12:45:32 crc kubenswrapper[4784]: W1205 12:45:30.797836 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod129c3642_c456_4b24_bef9_9bade50088d7.slice/crio-96bd8177f6c5dc5a8e289ccf6f59786c72b1e9973e50887f36ced6b32dda6602 WatchSource:0}: Error finding container 96bd8177f6c5dc5a8e289ccf6f59786c72b1e9973e50887f36ced6b32dda6602: Status 404 returned error can't find the container with id 96bd8177f6c5dc5a8e289ccf6f59786c72b1e9973e50887f36ced6b32dda6602 Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:30.797892 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f34e93a8-02d9-44ef-a18e-13ce24c3f9a6","Type":"ContainerStarted","Data":"37df220c4860e1f1e1756d614479c320830625c2b8ae105aa900f082aab90112"} Dec 05 12:45:32 crc kubenswrapper[4784]: W1205 12:45:30.811028 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode187592b_b331_4144_9a27_ba81e79121b6.slice/crio-cc393da546dfd7c02cb81e0614ebb97dc17c17c306ce8edf8b6e9e5968b8ff0a WatchSource:0}: Error finding container cc393da546dfd7c02cb81e0614ebb97dc17c17c306ce8edf8b6e9e5968b8ff0a: Status 404 returned error can't find the container with id cc393da546dfd7c02cb81e0614ebb97dc17c17c306ce8edf8b6e9e5968b8ff0a Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:30.971111 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-api-0"] Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:31.771159 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-api-0"] Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:31.846732 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-57f74fb67c-6g957"] Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:31.850431 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-24k4j" event={"ID":"e187592b-b331-4144-9a27-ba81e79121b6","Type":"ContainerStarted","Data":"cc393da546dfd7c02cb81e0614ebb97dc17c17c306ce8edf8b6e9e5968b8ff0a"} Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:31.855683 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-5fd4688c89-6ndnj"] Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:31.857062 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5fd4688c89-6ndnj" Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:31.859521 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f34e93a8-02d9-44ef-a18e-13ce24c3f9a6","Type":"ContainerStarted","Data":"68e0f9f2f6e94d652b083c74a49a3d50116e24d2dcadc9f74aefcb21098315c5"} Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:31.864823 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5fd4688c89-6ndnj"] Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:31.900205 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3","Type":"ContainerStarted","Data":"80c22f328da394495de003fd6791ae5f3a9eb8306e42226a91e45feb1d2520b0"} Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:31.900244 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3","Type":"ContainerStarted","Data":"6313dbf91685fedd972b24d23634cc938cd231c609b0b6ef46b14b015416a5bb"} Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:31.903677 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-pblr2" event={"ID":"129c3642-c456-4b24-bef9-9bade50088d7","Type":"ContainerStarted","Data":"b646fcb2b2c249a594e14c73e5dc74284417d3cad63669cec82c1e0dcca98dad"} Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:31.903705 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-pblr2" event={"ID":"129c3642-c456-4b24-bef9-9bade50088d7","Type":"ContainerStarted","Data":"96bd8177f6c5dc5a8e289ccf6f59786c72b1e9973e50887f36ced6b32dda6602"} Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:31.911006 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-applier-0" event={"ID":"1b372793-eadc-4593-af62-97e7d647c76d","Type":"ContainerStarted","Data":"f64fbdcfff6e1d575ee3e74a37671546dd15cffdcaf6eb862b9e06ee6304fdb4"} Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:31.931981 4784 generic.go:334] "Generic (PLEG): container finished" podID="a0516148-87e4-4900-8ad9-e4d81d6599e4" containerID="0390672ffe67fd3febe71f5990ad3d6305ebdd0e16c0af1610e7fcd2b8bcc149" exitCode=0 Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:31.932048 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f486db75f-kwgvh" event={"ID":"a0516148-87e4-4900-8ad9-e4d81d6599e4","Type":"ContainerDied","Data":"0390672ffe67fd3febe71f5990ad3d6305ebdd0e16c0af1610e7fcd2b8bcc149"} Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:31.933389 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/db3d333b-cc81-49d9-a8bf-1bdad94739c2-config-data\") pod \"horizon-5fd4688c89-6ndnj\" (UID: \"db3d333b-cc81-49d9-a8bf-1bdad94739c2\") " pod="openstack/horizon-5fd4688c89-6ndnj" Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:31.933555 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/db3d333b-cc81-49d9-a8bf-1bdad94739c2-horizon-secret-key\") pod \"horizon-5fd4688c89-6ndnj\" (UID: \"db3d333b-cc81-49d9-a8bf-1bdad94739c2\") " pod="openstack/horizon-5fd4688c89-6ndnj" Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:31.933592 4784 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s5sjg\" (UniqueName: \"kubernetes.io/projected/db3d333b-cc81-49d9-a8bf-1bdad94739c2-kube-api-access-s5sjg\") pod \"horizon-5fd4688c89-6ndnj\" (UID: \"db3d333b-cc81-49d9-a8bf-1bdad94739c2\") " pod="openstack/horizon-5fd4688c89-6ndnj" Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:31.933829 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/db3d333b-cc81-49d9-a8bf-1bdad94739c2-logs\") pod \"horizon-5fd4688c89-6ndnj\" (UID: \"db3d333b-cc81-49d9-a8bf-1bdad94739c2\") " pod="openstack/horizon-5fd4688c89-6ndnj" Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:31.933889 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/db3d333b-cc81-49d9-a8bf-1bdad94739c2-scripts\") pod \"horizon-5fd4688c89-6ndnj\" (UID: \"db3d333b-cc81-49d9-a8bf-1bdad94739c2\") " pod="openstack/horizon-5fd4688c89-6ndnj" Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:31.941571 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"23695adb-c981-437e-b37d-cef86d57d515","Type":"ContainerStarted","Data":"cf01e0f8a7896ba8a46a0e2e48080038f0f871a17c95e64d21692fc651fb129c"} Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:31.941790 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"23695adb-c981-437e-b37d-cef86d57d515","Type":"ContainerStarted","Data":"1fbd721a50e421d6dd1bb22839a59d38d3aa6766f57ca15f1191a0d564e635df"} Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:31.951743 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=26.951588352999998 podStartE2EDuration="26.951588353s" podCreationTimestamp="2025-12-05 12:45:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:45:31.940012791 +0000 UTC m=+1211.360079616" watchObservedRunningTime="2025-12-05 12:45:31.951588353 +0000 UTC m=+1211.371655168" Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:31.982397 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-pblr2" podStartSLOduration=2.9823823149999997 podStartE2EDuration="2.982382315s" podCreationTimestamp="2025-12-05 12:45:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:45:31.979631749 +0000 UTC m=+1211.399698564" watchObservedRunningTime="2025-12-05 12:45:31.982382315 +0000 UTC m=+1211.402449130" Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:32.035258 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/db3d333b-cc81-49d9-a8bf-1bdad94739c2-logs\") pod \"horizon-5fd4688c89-6ndnj\" (UID: \"db3d333b-cc81-49d9-a8bf-1bdad94739c2\") " pod="openstack/horizon-5fd4688c89-6ndnj" Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:32.035305 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/db3d333b-cc81-49d9-a8bf-1bdad94739c2-scripts\") pod \"horizon-5fd4688c89-6ndnj\" (UID: \"db3d333b-cc81-49d9-a8bf-1bdad94739c2\") " 
pod="openstack/horizon-5fd4688c89-6ndnj" Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:32.035339 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/db3d333b-cc81-49d9-a8bf-1bdad94739c2-config-data\") pod \"horizon-5fd4688c89-6ndnj\" (UID: \"db3d333b-cc81-49d9-a8bf-1bdad94739c2\") " pod="openstack/horizon-5fd4688c89-6ndnj" Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:32.035711 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/db3d333b-cc81-49d9-a8bf-1bdad94739c2-horizon-secret-key\") pod \"horizon-5fd4688c89-6ndnj\" (UID: \"db3d333b-cc81-49d9-a8bf-1bdad94739c2\") " pod="openstack/horizon-5fd4688c89-6ndnj" Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:32.035735 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s5sjg\" (UniqueName: \"kubernetes.io/projected/db3d333b-cc81-49d9-a8bf-1bdad94739c2-kube-api-access-s5sjg\") pod \"horizon-5fd4688c89-6ndnj\" (UID: \"db3d333b-cc81-49d9-a8bf-1bdad94739c2\") " pod="openstack/horizon-5fd4688c89-6ndnj" Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:32.035625 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/db3d333b-cc81-49d9-a8bf-1bdad94739c2-logs\") pod \"horizon-5fd4688c89-6ndnj\" (UID: \"db3d333b-cc81-49d9-a8bf-1bdad94739c2\") " pod="openstack/horizon-5fd4688c89-6ndnj" Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:32.036539 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/db3d333b-cc81-49d9-a8bf-1bdad94739c2-scripts\") pod \"horizon-5fd4688c89-6ndnj\" (UID: \"db3d333b-cc81-49d9-a8bf-1bdad94739c2\") " pod="openstack/horizon-5fd4688c89-6ndnj" Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:32.036769 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/db3d333b-cc81-49d9-a8bf-1bdad94739c2-config-data\") pod \"horizon-5fd4688c89-6ndnj\" (UID: \"db3d333b-cc81-49d9-a8bf-1bdad94739c2\") " pod="openstack/horizon-5fd4688c89-6ndnj" Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:32.053030 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s5sjg\" (UniqueName: \"kubernetes.io/projected/db3d333b-cc81-49d9-a8bf-1bdad94739c2-kube-api-access-s5sjg\") pod \"horizon-5fd4688c89-6ndnj\" (UID: \"db3d333b-cc81-49d9-a8bf-1bdad94739c2\") " pod="openstack/horizon-5fd4688c89-6ndnj" Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:32.053357 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/db3d333b-cc81-49d9-a8bf-1bdad94739c2-horizon-secret-key\") pod \"horizon-5fd4688c89-6ndnj\" (UID: \"db3d333b-cc81-49d9-a8bf-1bdad94739c2\") " pod="openstack/horizon-5fd4688c89-6ndnj" Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:32.202813 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5fd4688c89-6ndnj" Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:32.331205 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:32.536307 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-vt6gx"] Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:32.567857 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-decision-engine-0"] Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:32.886896 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-zx82c"] Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:32.902987 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-74474b4965-5h5n8"] Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:32.912589 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:32.938547 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-bwtqt"] Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:32.962652 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6f76dd6ddf-4shbw"] Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:32.969341 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"23695adb-c981-437e-b37d-cef86d57d515","Type":"ContainerStarted","Data":"91b4a91b5f7fffd06b49ced759e917b39298c704edd1d39156031be97bd54f9d"} Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:32.969489 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/watcher-api-0" podUID="23695adb-c981-437e-b37d-cef86d57d515" containerName="watcher-api-log" containerID="cri-o://cf01e0f8a7896ba8a46a0e2e48080038f0f871a17c95e64d21692fc651fb129c" gracePeriod=30 Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:32.969542 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/watcher-api-0" podUID="23695adb-c981-437e-b37d-cef86d57d515" containerName="watcher-api" containerID="cri-o://91b4a91b5f7fffd06b49ced759e917b39298c704edd1d39156031be97bd54f9d" gracePeriod=30 Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:32.969927 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0" Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:32.977656 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-57f74fb67c-6g957"] Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:32.979052 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f34e93a8-02d9-44ef-a18e-13ce24c3f9a6","Type":"ContainerStarted","Data":"1e7e7efcf39b86f0bc658b2760cf0952e89fb26c476cc1d8ec408675c49f4d59"} Dec 05 12:45:32 crc kubenswrapper[4784]: I1205 12:45:32.983616 4784 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/watcher-api-0" podUID="23695adb-c981-437e-b37d-cef86d57d515" containerName="watcher-api" probeResult="failure" output="Get \"http://10.217.0.149:9322/\": EOF" Dec 05 12:45:33 crc kubenswrapper[4784]: I1205 12:45:33.003065 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-api-0" podStartSLOduration=4.003047815 podStartE2EDuration="4.003047815s" podCreationTimestamp="2025-12-05 12:45:29 +0000 UTC" firstStartedPulling="0001-01-01 
00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:45:32.99264818 +0000 UTC m=+1212.412714995" watchObservedRunningTime="2025-12-05 12:45:33.003047815 +0000 UTC m=+1212.423114630" Dec 05 12:45:33 crc kubenswrapper[4784]: W1205 12:45:33.252146 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podee259e74_24f6_4a39_b3d9_3bd926ace782.slice/crio-1c115486369e3e3d38f90f80740a7547e2fa55768f3f637d76baa41fc815389c WatchSource:0}: Error finding container 1c115486369e3e3d38f90f80740a7547e2fa55768f3f637d76baa41fc815389c: Status 404 returned error can't find the container with id 1c115486369e3e3d38f90f80740a7547e2fa55768f3f637d76baa41fc815389c Dec 05 12:45:33 crc kubenswrapper[4784]: W1205 12:45:33.268615 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7216e2bb_f775_4b6b_9f34_b966f26f4002.slice/crio-30cc5752d81ed81ada223d3294c06076b75b53e26f10b2322dab43b956c7822a WatchSource:0}: Error finding container 30cc5752d81ed81ada223d3294c06076b75b53e26f10b2322dab43b956c7822a: Status 404 returned error can't find the container with id 30cc5752d81ed81ada223d3294c06076b75b53e26f10b2322dab43b956c7822a Dec 05 12:45:33 crc kubenswrapper[4784]: I1205 12:45:33.486052 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f486db75f-kwgvh" Dec 05 12:45:33 crc kubenswrapper[4784]: I1205 12:45:33.572805 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a0516148-87e4-4900-8ad9-e4d81d6599e4-ovsdbserver-sb\") pod \"a0516148-87e4-4900-8ad9-e4d81d6599e4\" (UID: \"a0516148-87e4-4900-8ad9-e4d81d6599e4\") " Dec 05 12:45:33 crc kubenswrapper[4784]: I1205 12:45:33.572880 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4m7kb\" (UniqueName: \"kubernetes.io/projected/a0516148-87e4-4900-8ad9-e4d81d6599e4-kube-api-access-4m7kb\") pod \"a0516148-87e4-4900-8ad9-e4d81d6599e4\" (UID: \"a0516148-87e4-4900-8ad9-e4d81d6599e4\") " Dec 05 12:45:33 crc kubenswrapper[4784]: I1205 12:45:33.572914 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a0516148-87e4-4900-8ad9-e4d81d6599e4-ovsdbserver-nb\") pod \"a0516148-87e4-4900-8ad9-e4d81d6599e4\" (UID: \"a0516148-87e4-4900-8ad9-e4d81d6599e4\") " Dec 05 12:45:33 crc kubenswrapper[4784]: I1205 12:45:33.573055 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a0516148-87e4-4900-8ad9-e4d81d6599e4-config\") pod \"a0516148-87e4-4900-8ad9-e4d81d6599e4\" (UID: \"a0516148-87e4-4900-8ad9-e4d81d6599e4\") " Dec 05 12:45:33 crc kubenswrapper[4784]: I1205 12:45:33.573119 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a0516148-87e4-4900-8ad9-e4d81d6599e4-dns-svc\") pod \"a0516148-87e4-4900-8ad9-e4d81d6599e4\" (UID: \"a0516148-87e4-4900-8ad9-e4d81d6599e4\") " Dec 05 12:45:33 crc kubenswrapper[4784]: I1205 12:45:33.631025 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0516148-87e4-4900-8ad9-e4d81d6599e4-kube-api-access-4m7kb" (OuterVolumeSpecName: "kube-api-access-4m7kb") pod 
"a0516148-87e4-4900-8ad9-e4d81d6599e4" (UID: "a0516148-87e4-4900-8ad9-e4d81d6599e4"). InnerVolumeSpecName "kube-api-access-4m7kb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:45:33 crc kubenswrapper[4784]: I1205 12:45:33.631334 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a0516148-87e4-4900-8ad9-e4d81d6599e4-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a0516148-87e4-4900-8ad9-e4d81d6599e4" (UID: "a0516148-87e4-4900-8ad9-e4d81d6599e4"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:45:33 crc kubenswrapper[4784]: I1205 12:45:33.631820 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a0516148-87e4-4900-8ad9-e4d81d6599e4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a0516148-87e4-4900-8ad9-e4d81d6599e4" (UID: "a0516148-87e4-4900-8ad9-e4d81d6599e4"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:45:33 crc kubenswrapper[4784]: I1205 12:45:33.673357 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a0516148-87e4-4900-8ad9-e4d81d6599e4-config" (OuterVolumeSpecName: "config") pod "a0516148-87e4-4900-8ad9-e4d81d6599e4" (UID: "a0516148-87e4-4900-8ad9-e4d81d6599e4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:45:33 crc kubenswrapper[4784]: I1205 12:45:33.676664 4784 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a0516148-87e4-4900-8ad9-e4d81d6599e4-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:33 crc kubenswrapper[4784]: I1205 12:45:33.676690 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4m7kb\" (UniqueName: \"kubernetes.io/projected/a0516148-87e4-4900-8ad9-e4d81d6599e4-kube-api-access-4m7kb\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:33 crc kubenswrapper[4784]: I1205 12:45:33.676701 4784 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a0516148-87e4-4900-8ad9-e4d81d6599e4-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:33 crc kubenswrapper[4784]: I1205 12:45:33.676709 4784 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a0516148-87e4-4900-8ad9-e4d81d6599e4-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:33 crc kubenswrapper[4784]: I1205 12:45:33.687666 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a0516148-87e4-4900-8ad9-e4d81d6599e4-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "a0516148-87e4-4900-8ad9-e4d81d6599e4" (UID: "a0516148-87e4-4900-8ad9-e4d81d6599e4"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:45:33 crc kubenswrapper[4784]: I1205 12:45:33.778796 4784 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a0516148-87e4-4900-8ad9-e4d81d6599e4-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:34 crc kubenswrapper[4784]: I1205 12:45:33.999699 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-vt6gx" event={"ID":"7216e2bb-f775-4b6b-9f34-b966f26f4002","Type":"ContainerStarted","Data":"cef934a659ba928393f7fd2f3159621d3c13b65bcacd8f43dbaa52033ec0dda7"} Dec 05 12:45:34 crc kubenswrapper[4784]: I1205 12:45:33.999739 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-vt6gx" event={"ID":"7216e2bb-f775-4b6b-9f34-b966f26f4002","Type":"ContainerStarted","Data":"30cc5752d81ed81ada223d3294c06076b75b53e26f10b2322dab43b956c7822a"} Dec 05 12:45:34 crc kubenswrapper[4784]: I1205 12:45:34.003097 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ee259e74-24f6-4a39-b3d9-3bd926ace782","Type":"ContainerStarted","Data":"1c115486369e3e3d38f90f80740a7547e2fa55768f3f637d76baa41fc815389c"} Dec 05 12:45:34 crc kubenswrapper[4784]: I1205 12:45:34.005252 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-bwtqt" event={"ID":"c921ceb1-e577-4b4a-be99-3544491930d3","Type":"ContainerStarted","Data":"6931e4ec7e72f947059231343196cf10337a317defddc6d14c4e6272f74e04c6"} Dec 05 12:45:34 crc kubenswrapper[4784]: I1205 12:45:34.007816 4784 generic.go:334] "Generic (PLEG): container finished" podID="23695adb-c981-437e-b37d-cef86d57d515" containerID="cf01e0f8a7896ba8a46a0e2e48080038f0f871a17c95e64d21692fc651fb129c" exitCode=143 Dec 05 12:45:34 crc kubenswrapper[4784]: I1205 12:45:34.007861 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"23695adb-c981-437e-b37d-cef86d57d515","Type":"ContainerDied","Data":"cf01e0f8a7896ba8a46a0e2e48080038f0f871a17c95e64d21692fc651fb129c"} Dec 05 12:45:34 crc kubenswrapper[4784]: I1205 12:45:34.009266 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"31a07479-cab0-4561-b49b-73b1c3dad744","Type":"ContainerStarted","Data":"5c063822994f76aa4464f8ed523aaa42a21641ddf7dd8caa41a05a6b9fdd2bff"} Dec 05 12:45:34 crc kubenswrapper[4784]: I1205 12:45:34.010767 4784 generic.go:334] "Generic (PLEG): container finished" podID="8c250fed-82c3-44b8-bf7f-b6ef3a2965a1" containerID="16bee9ba48c0b0ab51080d6dd7a25693fb10f218cb5d7dda666030cf67fd30d2" exitCode=0 Dec 05 12:45:34 crc kubenswrapper[4784]: I1205 12:45:34.010805 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f76dd6ddf-4shbw" event={"ID":"8c250fed-82c3-44b8-bf7f-b6ef3a2965a1","Type":"ContainerDied","Data":"16bee9ba48c0b0ab51080d6dd7a25693fb10f218cb5d7dda666030cf67fd30d2"} Dec 05 12:45:34 crc kubenswrapper[4784]: I1205 12:45:34.010820 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f76dd6ddf-4shbw" event={"ID":"8c250fed-82c3-44b8-bf7f-b6ef3a2965a1","Type":"ContainerStarted","Data":"119cdb7237863791afb33c6a032531c723f5445d19ab6f769e88a59b154b8c81"} Dec 05 12:45:34 crc kubenswrapper[4784]: I1205 12:45:34.039003 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-applier-0" 
event={"ID":"1b372793-eadc-4593-af62-97e7d647c76d","Type":"ContainerStarted","Data":"cdf80954ad7cdb8479125a839315ea2440a4b028301002e1b7288c6041aa30a1"} Dec 05 12:45:34 crc kubenswrapper[4784]: I1205 12:45:34.047601 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-57f74fb67c-6g957" event={"ID":"23e368b2-6047-40b1-afd5-7899c48c94ad","Type":"ContainerStarted","Data":"f1a2583d87c388c0ffd85a331ab1748eda2988800ff274eb5bd4ddee32e4161f"} Dec 05 12:45:34 crc kubenswrapper[4784]: I1205 12:45:34.050939 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-74474b4965-5h5n8" event={"ID":"5d78607c-ec1e-429a-b458-217f7de86abc","Type":"ContainerStarted","Data":"ad3631de0d715ed39a2dab76ba4ebd62242cfc8060c71787c5bd1abe496e1f93"} Dec 05 12:45:34 crc kubenswrapper[4784]: I1205 12:45:34.046152 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-vt6gx" podStartSLOduration=5.046134695 podStartE2EDuration="5.046134695s" podCreationTimestamp="2025-12-05 12:45:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:45:34.018676977 +0000 UTC m=+1213.438743792" watchObservedRunningTime="2025-12-05 12:45:34.046134695 +0000 UTC m=+1213.466201500" Dec 05 12:45:34 crc kubenswrapper[4784]: I1205 12:45:34.063211 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f486db75f-kwgvh" event={"ID":"a0516148-87e4-4900-8ad9-e4d81d6599e4","Type":"ContainerDied","Data":"b3e75db664bb18b3a448c3e494a325b46e129245280bedce5b994ca03752f09b"} Dec 05 12:45:34 crc kubenswrapper[4784]: I1205 12:45:34.063237 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f486db75f-kwgvh" Dec 05 12:45:34 crc kubenswrapper[4784]: I1205 12:45:34.063264 4784 scope.go:117] "RemoveContainer" containerID="0390672ffe67fd3febe71f5990ad3d6305ebdd0e16c0af1610e7fcd2b8bcc149" Dec 05 12:45:34 crc kubenswrapper[4784]: I1205 12:45:34.079943 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-zx82c" event={"ID":"851aaea1-2d18-4f91-b410-5fdb0a7f42ec","Type":"ContainerStarted","Data":"f81046e74ef906ad90d19cdd55431b7a4803c0fe17668bebf94813a8f62e713a"} Dec 05 12:45:34 crc kubenswrapper[4784]: I1205 12:45:34.082644 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-applier-0" podStartSLOduration=2.447676402 podStartE2EDuration="5.082628826s" podCreationTimestamp="2025-12-05 12:45:29 +0000 UTC" firstStartedPulling="2025-12-05 12:45:30.798581335 +0000 UTC m=+1210.218648150" lastFinishedPulling="2025-12-05 12:45:33.433533759 +0000 UTC m=+1212.853600574" observedRunningTime="2025-12-05 12:45:34.068127903 +0000 UTC m=+1213.488194738" watchObservedRunningTime="2025-12-05 12:45:34.082628826 +0000 UTC m=+1213.502695641" Dec 05 12:45:34 crc kubenswrapper[4784]: I1205 12:45:34.161480 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5fd4688c89-6ndnj"] Dec 05 12:45:34 crc kubenswrapper[4784]: W1205 12:45:34.162347 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddb3d333b_cc81_49d9_a8bf_1bdad94739c2.slice/crio-a72c05a16be9268882efca36976bbc2f5d1041de56488fe7181a5dba0cad8c59 WatchSource:0}: Error finding container a72c05a16be9268882efca36976bbc2f5d1041de56488fe7181a5dba0cad8c59: Status 404 returned error can't 
find the container with id a72c05a16be9268882efca36976bbc2f5d1041de56488fe7181a5dba0cad8c59 Dec 05 12:45:34 crc kubenswrapper[4784]: I1205 12:45:34.182202 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f486db75f-kwgvh"] Dec 05 12:45:34 crc kubenswrapper[4784]: I1205 12:45:34.197152 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5f486db75f-kwgvh"] Dec 05 12:45:34 crc kubenswrapper[4784]: I1205 12:45:34.964710 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-applier-0" Dec 05 12:45:35 crc kubenswrapper[4784]: I1205 12:45:35.013302 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0516148-87e4-4900-8ad9-e4d81d6599e4" path="/var/lib/kubelet/pods/a0516148-87e4-4900-8ad9-e4d81d6599e4/volumes" Dec 05 12:45:35 crc kubenswrapper[4784]: I1205 12:45:35.013994 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0" Dec 05 12:45:35 crc kubenswrapper[4784]: I1205 12:45:35.092242 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f76dd6ddf-4shbw" event={"ID":"8c250fed-82c3-44b8-bf7f-b6ef3a2965a1","Type":"ContainerStarted","Data":"0e613d9dd113c951afd15c648262de3f6869b0eaeaf60ad1238d68ee7320b8cf"} Dec 05 12:45:35 crc kubenswrapper[4784]: I1205 12:45:35.093495 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6f76dd6ddf-4shbw" Dec 05 12:45:35 crc kubenswrapper[4784]: I1205 12:45:35.099839 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5fd4688c89-6ndnj" event={"ID":"db3d333b-cc81-49d9-a8bf-1bdad94739c2","Type":"ContainerStarted","Data":"a72c05a16be9268882efca36976bbc2f5d1041de56488fe7181a5dba0cad8c59"} Dec 05 12:45:35 crc kubenswrapper[4784]: I1205 12:45:35.111127 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6f76dd6ddf-4shbw" podStartSLOduration=6.11110504 podStartE2EDuration="6.11110504s" podCreationTimestamp="2025-12-05 12:45:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:45:35.10852206 +0000 UTC m=+1214.528588875" watchObservedRunningTime="2025-12-05 12:45:35.11110504 +0000 UTC m=+1214.531171855" Dec 05 12:45:35 crc kubenswrapper[4784]: I1205 12:45:35.671982 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Dec 05 12:45:35 crc kubenswrapper[4784]: I1205 12:45:35.672143 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Dec 05 12:45:35 crc kubenswrapper[4784]: I1205 12:45:35.678815 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Dec 05 12:45:36 crc kubenswrapper[4784]: I1205 12:45:36.126745 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Dec 05 12:45:37 crc kubenswrapper[4784]: I1205 12:45:37.110643 4784 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/watcher-api-0" podUID="23695adb-c981-437e-b37d-cef86d57d515" containerName="watcher-api" probeResult="failure" output="Get \"http://10.217.0.149:9322/\": read tcp 10.217.0.2:43602->10.217.0.149:9322: read: connection reset by peer" Dec 05 12:45:38 crc kubenswrapper[4784]: I1205 12:45:38.149501 4784 generic.go:334] "Generic 
(PLEG): container finished" podID="129c3642-c456-4b24-bef9-9bade50088d7" containerID="b646fcb2b2c249a594e14c73e5dc74284417d3cad63669cec82c1e0dcca98dad" exitCode=0 Dec 05 12:45:38 crc kubenswrapper[4784]: I1205 12:45:38.149570 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-pblr2" event={"ID":"129c3642-c456-4b24-bef9-9bade50088d7","Type":"ContainerDied","Data":"b646fcb2b2c249a594e14c73e5dc74284417d3cad63669cec82c1e0dcca98dad"} Dec 05 12:45:38 crc kubenswrapper[4784]: I1205 12:45:38.153619 4784 generic.go:334] "Generic (PLEG): container finished" podID="23695adb-c981-437e-b37d-cef86d57d515" containerID="91b4a91b5f7fffd06b49ced759e917b39298c704edd1d39156031be97bd54f9d" exitCode=0 Dec 05 12:45:38 crc kubenswrapper[4784]: I1205 12:45:38.154642 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"23695adb-c981-437e-b37d-cef86d57d515","Type":"ContainerDied","Data":"91b4a91b5f7fffd06b49ced759e917b39298c704edd1d39156031be97bd54f9d"} Dec 05 12:45:38 crc kubenswrapper[4784]: I1205 12:45:38.669034 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-74474b4965-5h5n8"] Dec 05 12:45:38 crc kubenswrapper[4784]: I1205 12:45:38.697565 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-5884d57c44-l8tbz"] Dec 05 12:45:38 crc kubenswrapper[4784]: E1205 12:45:38.698024 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0516148-87e4-4900-8ad9-e4d81d6599e4" containerName="init" Dec 05 12:45:38 crc kubenswrapper[4784]: I1205 12:45:38.698041 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0516148-87e4-4900-8ad9-e4d81d6599e4" containerName="init" Dec 05 12:45:38 crc kubenswrapper[4784]: I1205 12:45:38.698264 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0516148-87e4-4900-8ad9-e4d81d6599e4" containerName="init" Dec 05 12:45:38 crc kubenswrapper[4784]: I1205 12:45:38.699223 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5884d57c44-l8tbz" Dec 05 12:45:38 crc kubenswrapper[4784]: I1205 12:45:38.705515 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Dec 05 12:45:38 crc kubenswrapper[4784]: I1205 12:45:38.706869 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5884d57c44-l8tbz"] Dec 05 12:45:38 crc kubenswrapper[4784]: I1205 12:45:38.755906 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5fd4688c89-6ndnj"] Dec 05 12:45:38 crc kubenswrapper[4784]: I1205 12:45:38.783376 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7cdb6b7d4-mvtql"] Dec 05 12:45:38 crc kubenswrapper[4784]: I1205 12:45:38.797690 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7cdb6b7d4-mvtql" Dec 05 12:45:38 crc kubenswrapper[4784]: I1205 12:45:38.832828 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/e78e5147-155e-4027-91ca-bf7e107f5b88-horizon-tls-certs\") pod \"horizon-5884d57c44-l8tbz\" (UID: \"e78e5147-155e-4027-91ca-bf7e107f5b88\") " pod="openstack/horizon-5884d57c44-l8tbz" Dec 05 12:45:38 crc kubenswrapper[4784]: I1205 12:45:38.832909 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e78e5147-155e-4027-91ca-bf7e107f5b88-combined-ca-bundle\") pod \"horizon-5884d57c44-l8tbz\" (UID: \"e78e5147-155e-4027-91ca-bf7e107f5b88\") " pod="openstack/horizon-5884d57c44-l8tbz" Dec 05 12:45:38 crc kubenswrapper[4784]: I1205 12:45:38.832962 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e78e5147-155e-4027-91ca-bf7e107f5b88-config-data\") pod \"horizon-5884d57c44-l8tbz\" (UID: \"e78e5147-155e-4027-91ca-bf7e107f5b88\") " pod="openstack/horizon-5884d57c44-l8tbz" Dec 05 12:45:38 crc kubenswrapper[4784]: I1205 12:45:38.832990 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e78e5147-155e-4027-91ca-bf7e107f5b88-scripts\") pod \"horizon-5884d57c44-l8tbz\" (UID: \"e78e5147-155e-4027-91ca-bf7e107f5b88\") " pod="openstack/horizon-5884d57c44-l8tbz" Dec 05 12:45:38 crc kubenswrapper[4784]: I1205 12:45:38.833112 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e78e5147-155e-4027-91ca-bf7e107f5b88-logs\") pod \"horizon-5884d57c44-l8tbz\" (UID: \"e78e5147-155e-4027-91ca-bf7e107f5b88\") " pod="openstack/horizon-5884d57c44-l8tbz" Dec 05 12:45:38 crc kubenswrapper[4784]: I1205 12:45:38.833156 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g4c48\" (UniqueName: \"kubernetes.io/projected/e78e5147-155e-4027-91ca-bf7e107f5b88-kube-api-access-g4c48\") pod \"horizon-5884d57c44-l8tbz\" (UID: \"e78e5147-155e-4027-91ca-bf7e107f5b88\") " pod="openstack/horizon-5884d57c44-l8tbz" Dec 05 12:45:38 crc kubenswrapper[4784]: I1205 12:45:38.833173 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/e78e5147-155e-4027-91ca-bf7e107f5b88-horizon-secret-key\") pod \"horizon-5884d57c44-l8tbz\" (UID: \"e78e5147-155e-4027-91ca-bf7e107f5b88\") " pod="openstack/horizon-5884d57c44-l8tbz" Dec 05 12:45:38 crc kubenswrapper[4784]: I1205 12:45:38.858000 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7cdb6b7d4-mvtql"] Dec 05 12:45:38 crc kubenswrapper[4784]: I1205 12:45:38.935464 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e78e5147-155e-4027-91ca-bf7e107f5b88-config-data\") pod \"horizon-5884d57c44-l8tbz\" (UID: \"e78e5147-155e-4027-91ca-bf7e107f5b88\") " pod="openstack/horizon-5884d57c44-l8tbz" Dec 05 12:45:38 crc kubenswrapper[4784]: I1205 12:45:38.935538 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/e78e5147-155e-4027-91ca-bf7e107f5b88-config-data\") pod \"horizon-5884d57c44-l8tbz\" (UID: \"e78e5147-155e-4027-91ca-bf7e107f5b88\") " pod="openstack/horizon-5884d57c44-l8tbz" Dec 05 12:45:38 crc kubenswrapper[4784]: I1205 12:45:38.935581 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e78e5147-155e-4027-91ca-bf7e107f5b88-scripts\") pod \"horizon-5884d57c44-l8tbz\" (UID: \"e78e5147-155e-4027-91ca-bf7e107f5b88\") " pod="openstack/horizon-5884d57c44-l8tbz" Dec 05 12:45:38 crc kubenswrapper[4784]: I1205 12:45:38.935612 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/5eba67d7-3c83-47c9-bdc2-0946f5839efd-horizon-tls-certs\") pod \"horizon-7cdb6b7d4-mvtql\" (UID: \"5eba67d7-3c83-47c9-bdc2-0946f5839efd\") " pod="openstack/horizon-7cdb6b7d4-mvtql" Dec 05 12:45:38 crc kubenswrapper[4784]: I1205 12:45:38.935724 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5eba67d7-3c83-47c9-bdc2-0946f5839efd-config-data\") pod \"horizon-7cdb6b7d4-mvtql\" (UID: \"5eba67d7-3c83-47c9-bdc2-0946f5839efd\") " pod="openstack/horizon-7cdb6b7d4-mvtql" Dec 05 12:45:38 crc kubenswrapper[4784]: I1205 12:45:38.935758 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5eba67d7-3c83-47c9-bdc2-0946f5839efd-logs\") pod \"horizon-7cdb6b7d4-mvtql\" (UID: \"5eba67d7-3c83-47c9-bdc2-0946f5839efd\") " pod="openstack/horizon-7cdb6b7d4-mvtql" Dec 05 12:45:38 crc kubenswrapper[4784]: I1205 12:45:38.935785 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-txcvc\" (UniqueName: \"kubernetes.io/projected/5eba67d7-3c83-47c9-bdc2-0946f5839efd-kube-api-access-txcvc\") pod \"horizon-7cdb6b7d4-mvtql\" (UID: \"5eba67d7-3c83-47c9-bdc2-0946f5839efd\") " pod="openstack/horizon-7cdb6b7d4-mvtql" Dec 05 12:45:38 crc kubenswrapper[4784]: I1205 12:45:38.935805 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5eba67d7-3c83-47c9-bdc2-0946f5839efd-scripts\") pod \"horizon-7cdb6b7d4-mvtql\" (UID: \"5eba67d7-3c83-47c9-bdc2-0946f5839efd\") " pod="openstack/horizon-7cdb6b7d4-mvtql" Dec 05 12:45:38 crc kubenswrapper[4784]: I1205 12:45:38.935821 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e78e5147-155e-4027-91ca-bf7e107f5b88-logs\") pod \"horizon-5884d57c44-l8tbz\" (UID: \"e78e5147-155e-4027-91ca-bf7e107f5b88\") " pod="openstack/horizon-5884d57c44-l8tbz" Dec 05 12:45:38 crc kubenswrapper[4784]: I1205 12:45:38.937019 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e78e5147-155e-4027-91ca-bf7e107f5b88-logs\") pod \"horizon-5884d57c44-l8tbz\" (UID: \"e78e5147-155e-4027-91ca-bf7e107f5b88\") " pod="openstack/horizon-5884d57c44-l8tbz" Dec 05 12:45:38 crc kubenswrapper[4784]: I1205 12:45:38.937773 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g4c48\" (UniqueName: \"kubernetes.io/projected/e78e5147-155e-4027-91ca-bf7e107f5b88-kube-api-access-g4c48\") pod \"horizon-5884d57c44-l8tbz\" 
(UID: \"e78e5147-155e-4027-91ca-bf7e107f5b88\") " pod="openstack/horizon-5884d57c44-l8tbz" Dec 05 12:45:38 crc kubenswrapper[4784]: I1205 12:45:38.937815 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/e78e5147-155e-4027-91ca-bf7e107f5b88-horizon-secret-key\") pod \"horizon-5884d57c44-l8tbz\" (UID: \"e78e5147-155e-4027-91ca-bf7e107f5b88\") " pod="openstack/horizon-5884d57c44-l8tbz" Dec 05 12:45:38 crc kubenswrapper[4784]: I1205 12:45:38.937891 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5eba67d7-3c83-47c9-bdc2-0946f5839efd-combined-ca-bundle\") pod \"horizon-7cdb6b7d4-mvtql\" (UID: \"5eba67d7-3c83-47c9-bdc2-0946f5839efd\") " pod="openstack/horizon-7cdb6b7d4-mvtql" Dec 05 12:45:38 crc kubenswrapper[4784]: I1205 12:45:38.937971 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5eba67d7-3c83-47c9-bdc2-0946f5839efd-horizon-secret-key\") pod \"horizon-7cdb6b7d4-mvtql\" (UID: \"5eba67d7-3c83-47c9-bdc2-0946f5839efd\") " pod="openstack/horizon-7cdb6b7d4-mvtql" Dec 05 12:45:38 crc kubenswrapper[4784]: I1205 12:45:38.938038 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/e78e5147-155e-4027-91ca-bf7e107f5b88-horizon-tls-certs\") pod \"horizon-5884d57c44-l8tbz\" (UID: \"e78e5147-155e-4027-91ca-bf7e107f5b88\") " pod="openstack/horizon-5884d57c44-l8tbz" Dec 05 12:45:38 crc kubenswrapper[4784]: I1205 12:45:38.938095 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e78e5147-155e-4027-91ca-bf7e107f5b88-combined-ca-bundle\") pod \"horizon-5884d57c44-l8tbz\" (UID: \"e78e5147-155e-4027-91ca-bf7e107f5b88\") " pod="openstack/horizon-5884d57c44-l8tbz" Dec 05 12:45:38 crc kubenswrapper[4784]: I1205 12:45:38.947451 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/e78e5147-155e-4027-91ca-bf7e107f5b88-horizon-tls-certs\") pod \"horizon-5884d57c44-l8tbz\" (UID: \"e78e5147-155e-4027-91ca-bf7e107f5b88\") " pod="openstack/horizon-5884d57c44-l8tbz" Dec 05 12:45:38 crc kubenswrapper[4784]: I1205 12:45:38.949519 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/e78e5147-155e-4027-91ca-bf7e107f5b88-horizon-secret-key\") pod \"horizon-5884d57c44-l8tbz\" (UID: \"e78e5147-155e-4027-91ca-bf7e107f5b88\") " pod="openstack/horizon-5884d57c44-l8tbz" Dec 05 12:45:38 crc kubenswrapper[4784]: I1205 12:45:38.956852 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e78e5147-155e-4027-91ca-bf7e107f5b88-combined-ca-bundle\") pod \"horizon-5884d57c44-l8tbz\" (UID: \"e78e5147-155e-4027-91ca-bf7e107f5b88\") " pod="openstack/horizon-5884d57c44-l8tbz" Dec 05 12:45:38 crc kubenswrapper[4784]: I1205 12:45:38.963229 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g4c48\" (UniqueName: \"kubernetes.io/projected/e78e5147-155e-4027-91ca-bf7e107f5b88-kube-api-access-g4c48\") pod \"horizon-5884d57c44-l8tbz\" (UID: \"e78e5147-155e-4027-91ca-bf7e107f5b88\") " 
pod="openstack/horizon-5884d57c44-l8tbz" Dec 05 12:45:39 crc kubenswrapper[4784]: I1205 12:45:39.040068 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5eba67d7-3c83-47c9-bdc2-0946f5839efd-config-data\") pod \"horizon-7cdb6b7d4-mvtql\" (UID: \"5eba67d7-3c83-47c9-bdc2-0946f5839efd\") " pod="openstack/horizon-7cdb6b7d4-mvtql" Dec 05 12:45:39 crc kubenswrapper[4784]: I1205 12:45:39.040169 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5eba67d7-3c83-47c9-bdc2-0946f5839efd-logs\") pod \"horizon-7cdb6b7d4-mvtql\" (UID: \"5eba67d7-3c83-47c9-bdc2-0946f5839efd\") " pod="openstack/horizon-7cdb6b7d4-mvtql" Dec 05 12:45:39 crc kubenswrapper[4784]: I1205 12:45:39.040231 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-txcvc\" (UniqueName: \"kubernetes.io/projected/5eba67d7-3c83-47c9-bdc2-0946f5839efd-kube-api-access-txcvc\") pod \"horizon-7cdb6b7d4-mvtql\" (UID: \"5eba67d7-3c83-47c9-bdc2-0946f5839efd\") " pod="openstack/horizon-7cdb6b7d4-mvtql" Dec 05 12:45:39 crc kubenswrapper[4784]: I1205 12:45:39.040264 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5eba67d7-3c83-47c9-bdc2-0946f5839efd-scripts\") pod \"horizon-7cdb6b7d4-mvtql\" (UID: \"5eba67d7-3c83-47c9-bdc2-0946f5839efd\") " pod="openstack/horizon-7cdb6b7d4-mvtql" Dec 05 12:45:39 crc kubenswrapper[4784]: I1205 12:45:39.040406 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5eba67d7-3c83-47c9-bdc2-0946f5839efd-combined-ca-bundle\") pod \"horizon-7cdb6b7d4-mvtql\" (UID: \"5eba67d7-3c83-47c9-bdc2-0946f5839efd\") " pod="openstack/horizon-7cdb6b7d4-mvtql" Dec 05 12:45:39 crc kubenswrapper[4784]: I1205 12:45:39.040505 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5eba67d7-3c83-47c9-bdc2-0946f5839efd-horizon-secret-key\") pod \"horizon-7cdb6b7d4-mvtql\" (UID: \"5eba67d7-3c83-47c9-bdc2-0946f5839efd\") " pod="openstack/horizon-7cdb6b7d4-mvtql" Dec 05 12:45:39 crc kubenswrapper[4784]: I1205 12:45:39.040805 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/5eba67d7-3c83-47c9-bdc2-0946f5839efd-horizon-tls-certs\") pod \"horizon-7cdb6b7d4-mvtql\" (UID: \"5eba67d7-3c83-47c9-bdc2-0946f5839efd\") " pod="openstack/horizon-7cdb6b7d4-mvtql" Dec 05 12:45:39 crc kubenswrapper[4784]: I1205 12:45:39.047910 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5eba67d7-3c83-47c9-bdc2-0946f5839efd-logs\") pod \"horizon-7cdb6b7d4-mvtql\" (UID: \"5eba67d7-3c83-47c9-bdc2-0946f5839efd\") " pod="openstack/horizon-7cdb6b7d4-mvtql" Dec 05 12:45:39 crc kubenswrapper[4784]: I1205 12:45:39.048049 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e78e5147-155e-4027-91ca-bf7e107f5b88-scripts\") pod \"horizon-5884d57c44-l8tbz\" (UID: \"e78e5147-155e-4027-91ca-bf7e107f5b88\") " pod="openstack/horizon-5884d57c44-l8tbz" Dec 05 12:45:39 crc kubenswrapper[4784]: I1205 12:45:39.049773 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/5eba67d7-3c83-47c9-bdc2-0946f5839efd-scripts\") pod \"horizon-7cdb6b7d4-mvtql\" (UID: \"5eba67d7-3c83-47c9-bdc2-0946f5839efd\") " pod="openstack/horizon-7cdb6b7d4-mvtql" Dec 05 12:45:39 crc kubenswrapper[4784]: I1205 12:45:39.050326 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5eba67d7-3c83-47c9-bdc2-0946f5839efd-config-data\") pod \"horizon-7cdb6b7d4-mvtql\" (UID: \"5eba67d7-3c83-47c9-bdc2-0946f5839efd\") " pod="openstack/horizon-7cdb6b7d4-mvtql" Dec 05 12:45:39 crc kubenswrapper[4784]: I1205 12:45:39.052237 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5eba67d7-3c83-47c9-bdc2-0946f5839efd-horizon-secret-key\") pod \"horizon-7cdb6b7d4-mvtql\" (UID: \"5eba67d7-3c83-47c9-bdc2-0946f5839efd\") " pod="openstack/horizon-7cdb6b7d4-mvtql" Dec 05 12:45:39 crc kubenswrapper[4784]: I1205 12:45:39.053617 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5eba67d7-3c83-47c9-bdc2-0946f5839efd-combined-ca-bundle\") pod \"horizon-7cdb6b7d4-mvtql\" (UID: \"5eba67d7-3c83-47c9-bdc2-0946f5839efd\") " pod="openstack/horizon-7cdb6b7d4-mvtql" Dec 05 12:45:39 crc kubenswrapper[4784]: I1205 12:45:39.054667 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/5eba67d7-3c83-47c9-bdc2-0946f5839efd-horizon-tls-certs\") pod \"horizon-7cdb6b7d4-mvtql\" (UID: \"5eba67d7-3c83-47c9-bdc2-0946f5839efd\") " pod="openstack/horizon-7cdb6b7d4-mvtql" Dec 05 12:45:39 crc kubenswrapper[4784]: I1205 12:45:39.057065 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-txcvc\" (UniqueName: \"kubernetes.io/projected/5eba67d7-3c83-47c9-bdc2-0946f5839efd-kube-api-access-txcvc\") pod \"horizon-7cdb6b7d4-mvtql\" (UID: \"5eba67d7-3c83-47c9-bdc2-0946f5839efd\") " pod="openstack/horizon-7cdb6b7d4-mvtql" Dec 05 12:45:39 crc kubenswrapper[4784]: I1205 12:45:39.160232 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7cdb6b7d4-mvtql" Dec 05 12:45:39 crc kubenswrapper[4784]: I1205 12:45:39.338790 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5884d57c44-l8tbz" Dec 05 12:45:39 crc kubenswrapper[4784]: I1205 12:45:39.965166 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-applier-0" Dec 05 12:45:39 crc kubenswrapper[4784]: I1205 12:45:39.991743 4784 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/watcher-api-0" podUID="23695adb-c981-437e-b37d-cef86d57d515" containerName="watcher-api" probeResult="failure" output="Get \"http://10.217.0.149:9322/\": dial tcp 10.217.0.149:9322: connect: connection refused" Dec 05 12:45:39 crc kubenswrapper[4784]: I1205 12:45:39.998718 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-applier-0" Dec 05 12:45:40 crc kubenswrapper[4784]: I1205 12:45:40.211725 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-applier-0" Dec 05 12:45:40 crc kubenswrapper[4784]: I1205 12:45:40.245109 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-applier-0"] Dec 05 12:45:40 crc kubenswrapper[4784]: I1205 12:45:40.760375 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6f76dd6ddf-4shbw" Dec 05 12:45:40 crc kubenswrapper[4784]: I1205 12:45:40.828310 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5dd44fd6df-gsqs9"] Dec 05 12:45:40 crc kubenswrapper[4784]: I1205 12:45:40.828613 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5dd44fd6df-gsqs9" podUID="bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d" containerName="dnsmasq-dns" containerID="cri-o://e3e8edd616699567cb85196ebc38758d923e6b64e26511be84467d941ece2f8b" gracePeriod=10 Dec 05 12:45:41 crc kubenswrapper[4784]: I1205 12:45:41.383042 4784 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5dd44fd6df-gsqs9" podUID="bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.120:5353: connect: connection refused" Dec 05 12:45:41 crc kubenswrapper[4784]: I1205 12:45:41.472986 4784 util.go:48] "No ready sandbox for pod can be found. 
Dec 05 12:45:41 crc kubenswrapper[4784]: I1205 12:45:41.607267 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lkqqz\" (UniqueName: \"kubernetes.io/projected/129c3642-c456-4b24-bef9-9bade50088d7-kube-api-access-lkqqz\") pod \"129c3642-c456-4b24-bef9-9bade50088d7\" (UID: \"129c3642-c456-4b24-bef9-9bade50088d7\") "
Dec 05 12:45:41 crc kubenswrapper[4784]: I1205 12:45:41.607592 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/129c3642-c456-4b24-bef9-9bade50088d7-scripts\") pod \"129c3642-c456-4b24-bef9-9bade50088d7\" (UID: \"129c3642-c456-4b24-bef9-9bade50088d7\") "
Dec 05 12:45:41 crc kubenswrapper[4784]: I1205 12:45:41.607700 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/129c3642-c456-4b24-bef9-9bade50088d7-credential-keys\") pod \"129c3642-c456-4b24-bef9-9bade50088d7\" (UID: \"129c3642-c456-4b24-bef9-9bade50088d7\") "
Dec 05 12:45:41 crc kubenswrapper[4784]: I1205 12:45:41.607834 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/129c3642-c456-4b24-bef9-9bade50088d7-config-data\") pod \"129c3642-c456-4b24-bef9-9bade50088d7\" (UID: \"129c3642-c456-4b24-bef9-9bade50088d7\") "
Dec 05 12:45:41 crc kubenswrapper[4784]: I1205 12:45:41.608273 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/129c3642-c456-4b24-bef9-9bade50088d7-fernet-keys\") pod \"129c3642-c456-4b24-bef9-9bade50088d7\" (UID: \"129c3642-c456-4b24-bef9-9bade50088d7\") "
Dec 05 12:45:41 crc kubenswrapper[4784]: I1205 12:45:41.608645 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/129c3642-c456-4b24-bef9-9bade50088d7-combined-ca-bundle\") pod \"129c3642-c456-4b24-bef9-9bade50088d7\" (UID: \"129c3642-c456-4b24-bef9-9bade50088d7\") "
Dec 05 12:45:41 crc kubenswrapper[4784]: I1205 12:45:41.615895 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/129c3642-c456-4b24-bef9-9bade50088d7-kube-api-access-lkqqz" (OuterVolumeSpecName: "kube-api-access-lkqqz") pod "129c3642-c456-4b24-bef9-9bade50088d7" (UID: "129c3642-c456-4b24-bef9-9bade50088d7"). InnerVolumeSpecName "kube-api-access-lkqqz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:45:41 crc kubenswrapper[4784]: I1205 12:45:41.618332 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/129c3642-c456-4b24-bef9-9bade50088d7-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "129c3642-c456-4b24-bef9-9bade50088d7" (UID: "129c3642-c456-4b24-bef9-9bade50088d7"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:45:41 crc kubenswrapper[4784]: I1205 12:45:41.618569 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/129c3642-c456-4b24-bef9-9bade50088d7-scripts" (OuterVolumeSpecName: "scripts") pod "129c3642-c456-4b24-bef9-9bade50088d7" (UID: "129c3642-c456-4b24-bef9-9bade50088d7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:45:41 crc kubenswrapper[4784]: I1205 12:45:41.622326 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/129c3642-c456-4b24-bef9-9bade50088d7-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "129c3642-c456-4b24-bef9-9bade50088d7" (UID: "129c3642-c456-4b24-bef9-9bade50088d7"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:45:41 crc kubenswrapper[4784]: I1205 12:45:41.652052 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/129c3642-c456-4b24-bef9-9bade50088d7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "129c3642-c456-4b24-bef9-9bade50088d7" (UID: "129c3642-c456-4b24-bef9-9bade50088d7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:45:41 crc kubenswrapper[4784]: I1205 12:45:41.688514 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/129c3642-c456-4b24-bef9-9bade50088d7-config-data" (OuterVolumeSpecName: "config-data") pod "129c3642-c456-4b24-bef9-9bade50088d7" (UID: "129c3642-c456-4b24-bef9-9bade50088d7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:45:41 crc kubenswrapper[4784]: I1205 12:45:41.712471 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/129c3642-c456-4b24-bef9-9bade50088d7-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 12:45:41 crc kubenswrapper[4784]: I1205 12:45:41.712521 4784 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/129c3642-c456-4b24-bef9-9bade50088d7-fernet-keys\") on node \"crc\" DevicePath \"\""
Dec 05 12:45:41 crc kubenswrapper[4784]: I1205 12:45:41.712534 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/129c3642-c456-4b24-bef9-9bade50088d7-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 12:45:41 crc kubenswrapper[4784]: I1205 12:45:41.712550 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lkqqz\" (UniqueName: \"kubernetes.io/projected/129c3642-c456-4b24-bef9-9bade50088d7-kube-api-access-lkqqz\") on node \"crc\" DevicePath \"\""
Dec 05 12:45:41 crc kubenswrapper[4784]: I1205 12:45:41.712564 4784 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/129c3642-c456-4b24-bef9-9bade50088d7-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 12:45:41 crc kubenswrapper[4784]: I1205 12:45:41.712589 4784 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/129c3642-c456-4b24-bef9-9bade50088d7-credential-keys\") on node \"crc\" DevicePath \"\""
Dec 05 12:45:42 crc kubenswrapper[4784]: I1205 12:45:42.198645 4784 generic.go:334] "Generic (PLEG): container finished" podID="bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d" containerID="e3e8edd616699567cb85196ebc38758d923e6b64e26511be84467d941ece2f8b" exitCode=0
Dec 05 12:45:42 crc kubenswrapper[4784]: I1205 12:45:42.198729 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5dd44fd6df-gsqs9" event={"ID":"bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d","Type":"ContainerDied","Data":"e3e8edd616699567cb85196ebc38758d923e6b64e26511be84467d941ece2f8b"}
Dec 05 12:45:42 crc kubenswrapper[4784]: I1205 12:45:42.204166 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f34e93a8-02d9-44ef-a18e-13ce24c3f9a6","Type":"ContainerStarted","Data":"66457239d7fabcc565e70bb8bd21a5a8092777ec566ab39fb8e9f4b2f5c7c198"}
Dec 05 12:45:42 crc kubenswrapper[4784]: I1205 12:45:42.206136 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/watcher-applier-0" podUID="1b372793-eadc-4593-af62-97e7d647c76d" containerName="watcher-applier" containerID="cri-o://cdf80954ad7cdb8479125a839315ea2440a4b028301002e1b7288c6041aa30a1" gracePeriod=30
Dec 05 12:45:42 crc kubenswrapper[4784]: I1205 12:45:42.206525 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-pblr2"
Dec 05 12:45:42 crc kubenswrapper[4784]: I1205 12:45:42.208971 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-pblr2" event={"ID":"129c3642-c456-4b24-bef9-9bade50088d7","Type":"ContainerDied","Data":"96bd8177f6c5dc5a8e289ccf6f59786c72b1e9973e50887f36ced6b32dda6602"}
Dec 05 12:45:42 crc kubenswrapper[4784]: I1205 12:45:42.209043 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="96bd8177f6c5dc5a8e289ccf6f59786c72b1e9973e50887f36ced6b32dda6602"
Dec 05 12:45:42 crc kubenswrapper[4784]: I1205 12:45:42.621202 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-pblr2"]
Dec 05 12:45:42 crc kubenswrapper[4784]: I1205 12:45:42.629569 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-pblr2"]
Dec 05 12:45:42 crc kubenswrapper[4784]: I1205 12:45:42.699550 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-jptmz"]
Dec 05 12:45:42 crc kubenswrapper[4784]: E1205 12:45:42.700073 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="129c3642-c456-4b24-bef9-9bade50088d7" containerName="keystone-bootstrap"
Dec 05 12:45:42 crc kubenswrapper[4784]: I1205 12:45:42.700093 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="129c3642-c456-4b24-bef9-9bade50088d7" containerName="keystone-bootstrap"
Dec 05 12:45:42 crc kubenswrapper[4784]: I1205 12:45:42.700305 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="129c3642-c456-4b24-bef9-9bade50088d7" containerName="keystone-bootstrap"
Dec 05 12:45:42 crc kubenswrapper[4784]: I1205 12:45:42.700910 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-jptmz"
Dec 05 12:45:42 crc kubenswrapper[4784]: I1205 12:45:42.703917 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Dec 05 12:45:42 crc kubenswrapper[4784]: I1205 12:45:42.703963 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Dec 05 12:45:42 crc kubenswrapper[4784]: I1205 12:45:42.703917 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret"
Dec 05 12:45:42 crc kubenswrapper[4784]: I1205 12:45:42.704159 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Dec 05 12:45:42 crc kubenswrapper[4784]: I1205 12:45:42.704298 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-lkxwc"
Dec 05 12:45:42 crc kubenswrapper[4784]: I1205 12:45:42.714474 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-jptmz"]
Dec 05 12:45:42 crc kubenswrapper[4784]: I1205 12:45:42.831694 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4b05cb70-952a-4d24-a3e5-cbbff5d53021-fernet-keys\") pod \"keystone-bootstrap-jptmz\" (UID: \"4b05cb70-952a-4d24-a3e5-cbbff5d53021\") " pod="openstack/keystone-bootstrap-jptmz"
Dec 05 12:45:42 crc kubenswrapper[4784]: I1205 12:45:42.831823 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4b05cb70-952a-4d24-a3e5-cbbff5d53021-credential-keys\") pod \"keystone-bootstrap-jptmz\" (UID: \"4b05cb70-952a-4d24-a3e5-cbbff5d53021\") " pod="openstack/keystone-bootstrap-jptmz"
Dec 05 12:45:42 crc kubenswrapper[4784]: I1205 12:45:42.831884 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b05cb70-952a-4d24-a3e5-cbbff5d53021-scripts\") pod \"keystone-bootstrap-jptmz\" (UID: \"4b05cb70-952a-4d24-a3e5-cbbff5d53021\") " pod="openstack/keystone-bootstrap-jptmz"
Dec 05 12:45:42 crc kubenswrapper[4784]: I1205 12:45:42.831921 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b05cb70-952a-4d24-a3e5-cbbff5d53021-config-data\") pod \"keystone-bootstrap-jptmz\" (UID: \"4b05cb70-952a-4d24-a3e5-cbbff5d53021\") " pod="openstack/keystone-bootstrap-jptmz"
Dec 05 12:45:42 crc kubenswrapper[4784]: I1205 12:45:42.831978 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bqpnp\" (UniqueName: \"kubernetes.io/projected/4b05cb70-952a-4d24-a3e5-cbbff5d53021-kube-api-access-bqpnp\") pod \"keystone-bootstrap-jptmz\" (UID: \"4b05cb70-952a-4d24-a3e5-cbbff5d53021\") " pod="openstack/keystone-bootstrap-jptmz"
Dec 05 12:45:42 crc kubenswrapper[4784]: I1205 12:45:42.832066 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b05cb70-952a-4d24-a3e5-cbbff5d53021-combined-ca-bundle\") pod \"keystone-bootstrap-jptmz\" (UID: \"4b05cb70-952a-4d24-a3e5-cbbff5d53021\") " pod="openstack/keystone-bootstrap-jptmz"
Dec 05 12:45:42 crc kubenswrapper[4784]: I1205 12:45:42.933791 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bqpnp\" (UniqueName: \"kubernetes.io/projected/4b05cb70-952a-4d24-a3e5-cbbff5d53021-kube-api-access-bqpnp\") pod \"keystone-bootstrap-jptmz\" (UID: \"4b05cb70-952a-4d24-a3e5-cbbff5d53021\") " pod="openstack/keystone-bootstrap-jptmz"
for volume \"kube-api-access-bqpnp\" (UniqueName: \"kubernetes.io/projected/4b05cb70-952a-4d24-a3e5-cbbff5d53021-kube-api-access-bqpnp\") pod \"keystone-bootstrap-jptmz\" (UID: \"4b05cb70-952a-4d24-a3e5-cbbff5d53021\") " pod="openstack/keystone-bootstrap-jptmz" Dec 05 12:45:42 crc kubenswrapper[4784]: I1205 12:45:42.933870 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b05cb70-952a-4d24-a3e5-cbbff5d53021-combined-ca-bundle\") pod \"keystone-bootstrap-jptmz\" (UID: \"4b05cb70-952a-4d24-a3e5-cbbff5d53021\") " pod="openstack/keystone-bootstrap-jptmz" Dec 05 12:45:42 crc kubenswrapper[4784]: I1205 12:45:42.933920 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4b05cb70-952a-4d24-a3e5-cbbff5d53021-fernet-keys\") pod \"keystone-bootstrap-jptmz\" (UID: \"4b05cb70-952a-4d24-a3e5-cbbff5d53021\") " pod="openstack/keystone-bootstrap-jptmz" Dec 05 12:45:42 crc kubenswrapper[4784]: I1205 12:45:42.934002 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4b05cb70-952a-4d24-a3e5-cbbff5d53021-credential-keys\") pod \"keystone-bootstrap-jptmz\" (UID: \"4b05cb70-952a-4d24-a3e5-cbbff5d53021\") " pod="openstack/keystone-bootstrap-jptmz" Dec 05 12:45:42 crc kubenswrapper[4784]: I1205 12:45:42.934029 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b05cb70-952a-4d24-a3e5-cbbff5d53021-scripts\") pod \"keystone-bootstrap-jptmz\" (UID: \"4b05cb70-952a-4d24-a3e5-cbbff5d53021\") " pod="openstack/keystone-bootstrap-jptmz" Dec 05 12:45:42 crc kubenswrapper[4784]: I1205 12:45:42.934061 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b05cb70-952a-4d24-a3e5-cbbff5d53021-config-data\") pod \"keystone-bootstrap-jptmz\" (UID: \"4b05cb70-952a-4d24-a3e5-cbbff5d53021\") " pod="openstack/keystone-bootstrap-jptmz" Dec 05 12:45:42 crc kubenswrapper[4784]: I1205 12:45:42.939515 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4b05cb70-952a-4d24-a3e5-cbbff5d53021-credential-keys\") pod \"keystone-bootstrap-jptmz\" (UID: \"4b05cb70-952a-4d24-a3e5-cbbff5d53021\") " pod="openstack/keystone-bootstrap-jptmz" Dec 05 12:45:42 crc kubenswrapper[4784]: I1205 12:45:42.939668 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4b05cb70-952a-4d24-a3e5-cbbff5d53021-fernet-keys\") pod \"keystone-bootstrap-jptmz\" (UID: \"4b05cb70-952a-4d24-a3e5-cbbff5d53021\") " pod="openstack/keystone-bootstrap-jptmz" Dec 05 12:45:42 crc kubenswrapper[4784]: I1205 12:45:42.940823 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b05cb70-952a-4d24-a3e5-cbbff5d53021-scripts\") pod \"keystone-bootstrap-jptmz\" (UID: \"4b05cb70-952a-4d24-a3e5-cbbff5d53021\") " pod="openstack/keystone-bootstrap-jptmz" Dec 05 12:45:42 crc kubenswrapper[4784]: I1205 12:45:42.941937 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b05cb70-952a-4d24-a3e5-cbbff5d53021-config-data\") pod \"keystone-bootstrap-jptmz\" (UID: \"4b05cb70-952a-4d24-a3e5-cbbff5d53021\") " 
pod="openstack/keystone-bootstrap-jptmz" Dec 05 12:45:42 crc kubenswrapper[4784]: I1205 12:45:42.945218 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b05cb70-952a-4d24-a3e5-cbbff5d53021-combined-ca-bundle\") pod \"keystone-bootstrap-jptmz\" (UID: \"4b05cb70-952a-4d24-a3e5-cbbff5d53021\") " pod="openstack/keystone-bootstrap-jptmz" Dec 05 12:45:42 crc kubenswrapper[4784]: I1205 12:45:42.950484 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bqpnp\" (UniqueName: \"kubernetes.io/projected/4b05cb70-952a-4d24-a3e5-cbbff5d53021-kube-api-access-bqpnp\") pod \"keystone-bootstrap-jptmz\" (UID: \"4b05cb70-952a-4d24-a3e5-cbbff5d53021\") " pod="openstack/keystone-bootstrap-jptmz" Dec 05 12:45:43 crc kubenswrapper[4784]: I1205 12:45:43.012020 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="129c3642-c456-4b24-bef9-9bade50088d7" path="/var/lib/kubelet/pods/129c3642-c456-4b24-bef9-9bade50088d7/volumes" Dec 05 12:45:43 crc kubenswrapper[4784]: I1205 12:45:43.031258 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-jptmz" Dec 05 12:45:44 crc kubenswrapper[4784]: E1205 12:45:44.967529 4784 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cdf80954ad7cdb8479125a839315ea2440a4b028301002e1b7288c6041aa30a1" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Dec 05 12:45:44 crc kubenswrapper[4784]: E1205 12:45:44.969842 4784 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cdf80954ad7cdb8479125a839315ea2440a4b028301002e1b7288c6041aa30a1" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Dec 05 12:45:44 crc kubenswrapper[4784]: E1205 12:45:44.974126 4784 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cdf80954ad7cdb8479125a839315ea2440a4b028301002e1b7288c6041aa30a1" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Dec 05 12:45:44 crc kubenswrapper[4784]: E1205 12:45:44.974209 4784 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/watcher-applier-0" podUID="1b372793-eadc-4593-af62-97e7d647c76d" containerName="watcher-applier" Dec 05 12:45:44 crc kubenswrapper[4784]: I1205 12:45:44.991584 4784 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/watcher-api-0" podUID="23695adb-c981-437e-b37d-cef86d57d515" containerName="watcher-api" probeResult="failure" output="Get \"http://10.217.0.149:9322/\": dial tcp 10.217.0.149:9322: connect: connection refused" Dec 05 12:45:46 crc kubenswrapper[4784]: I1205 12:45:46.260367 4784 generic.go:334] "Generic (PLEG): container finished" podID="a94ed534-bf7b-4fdc-9c79-0fa4425cb785" containerID="ad747a9b988792246f6406b792de3f2be8a3af509d4378bec43549e9fc5f91ca" exitCode=0 Dec 05 12:45:46 crc kubenswrapper[4784]: I1205 12:45:46.260479 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-xcs6s" 
event={"ID":"a94ed534-bf7b-4fdc-9c79-0fa4425cb785","Type":"ContainerDied","Data":"ad747a9b988792246f6406b792de3f2be8a3af509d4378bec43549e9fc5f91ca"} Dec 05 12:45:46 crc kubenswrapper[4784]: I1205 12:45:46.263749 4784 generic.go:334] "Generic (PLEG): container finished" podID="1b372793-eadc-4593-af62-97e7d647c76d" containerID="cdf80954ad7cdb8479125a839315ea2440a4b028301002e1b7288c6041aa30a1" exitCode=0 Dec 05 12:45:46 crc kubenswrapper[4784]: I1205 12:45:46.263803 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-applier-0" event={"ID":"1b372793-eadc-4593-af62-97e7d647c76d","Type":"ContainerDied","Data":"cdf80954ad7cdb8479125a839315ea2440a4b028301002e1b7288c6041aa30a1"} Dec 05 12:45:46 crc kubenswrapper[4784]: I1205 12:45:46.382948 4784 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5dd44fd6df-gsqs9" podUID="bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.120:5353: connect: connection refused" Dec 05 12:45:49 crc kubenswrapper[4784]: E1205 12:45:49.966451 4784 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of cdf80954ad7cdb8479125a839315ea2440a4b028301002e1b7288c6041aa30a1 is running failed: container process not found" containerID="cdf80954ad7cdb8479125a839315ea2440a4b028301002e1b7288c6041aa30a1" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Dec 05 12:45:49 crc kubenswrapper[4784]: E1205 12:45:49.967065 4784 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of cdf80954ad7cdb8479125a839315ea2440a4b028301002e1b7288c6041aa30a1 is running failed: container process not found" containerID="cdf80954ad7cdb8479125a839315ea2440a4b028301002e1b7288c6041aa30a1" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Dec 05 12:45:49 crc kubenswrapper[4784]: E1205 12:45:49.967373 4784 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of cdf80954ad7cdb8479125a839315ea2440a4b028301002e1b7288c6041aa30a1 is running failed: container process not found" containerID="cdf80954ad7cdb8479125a839315ea2440a4b028301002e1b7288c6041aa30a1" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Dec 05 12:45:49 crc kubenswrapper[4784]: E1205 12:45:49.967403 4784 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of cdf80954ad7cdb8479125a839315ea2440a4b028301002e1b7288c6041aa30a1 is running failed: container process not found" probeType="Readiness" pod="openstack/watcher-applier-0" podUID="1b372793-eadc-4593-af62-97e7d647c76d" containerName="watcher-applier" Dec 05 12:45:49 crc kubenswrapper[4784]: I1205 12:45:49.992044 4784 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/watcher-api-0" podUID="23695adb-c981-437e-b37d-cef86d57d515" containerName="watcher-api" probeResult="failure" output="Get \"http://10.217.0.149:9322/\": dial tcp 10.217.0.149:9322: connect: connection refused" Dec 05 12:45:50 crc kubenswrapper[4784]: E1205 12:45:50.279599 4784 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.151:5001/podified-master-centos10/openstack-horizon:watcher_latest" Dec 05 12:45:50 crc kubenswrapper[4784]: E1205 12:45:50.279653 4784 
kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.151:5001/podified-master-centos10/openstack-horizon:watcher_latest" Dec 05 12:45:50 crc kubenswrapper[4784]: E1205 12:45:50.279785 4784 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:38.102.83.151:5001/podified-master-centos10/openstack-horizon:watcher_latest,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n647h58dh5dfh559h659h8dh548h64bh557h59bh59fh658h667h95h695h9ch695h58bhb4h546h687h585h5f6h4hc4hd5h6bh96hbh57fhbhbcq,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:yes,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-s5sjg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-5fd4688c89-6ndnj_openstack(db3d333b-cc81-49d9-a8bf-1bdad94739c2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 12:45:50 crc kubenswrapper[4784]: E1205 12:45:50.282370 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.151:5001/podified-master-centos10/openstack-horizon:watcher_latest\\\"\"]" pod="openstack/horizon-5fd4688c89-6ndnj" podUID="db3d333b-cc81-49d9-a8bf-1bdad94739c2" Dec 05 12:45:50 crc kubenswrapper[4784]: E1205 12:45:50.295691 4784 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.151:5001/podified-master-centos10/openstack-horizon:watcher_latest" Dec 05 12:45:50 crc kubenswrapper[4784]: E1205 12:45:50.295766 4784 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.151:5001/podified-master-centos10/openstack-horizon:watcher_latest" Dec 05 12:45:50 crc kubenswrapper[4784]: 
Dec 05 12:45:50 crc kubenswrapper[4784]: E1205 12:45:50.295975 4784 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:38.102.83.151:5001/podified-master-centos10/openstack-horizon:watcher_latest,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5f7h665hddh5d6hf9h5c7h57fh675h68h67dh8hb7h696h6fh586h646h5bfh75h5d4h75h699h5f9h7dh59h644hdch94hd6h5c6h669h6bh594q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:yes,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6xkcl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-74474b4965-5h5n8_openstack(5d78607c-ec1e-429a-b458-217f7de86abc): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 05 12:45:50 crc kubenswrapper[4784]: E1205 12:45:50.298607 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.151:5001/podified-master-centos10/openstack-horizon:watcher_latest\\\"\"]" pod="openstack/horizon-74474b4965-5h5n8" podUID="5d78607c-ec1e-429a-b458-217f7de86abc"
Dec 05 12:45:50 crc kubenswrapper[4784]: E1205 12:45:50.304706 4784 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.151:5001/podified-master-centos10/openstack-horizon:watcher_latest"
Dec 05 12:45:50 crc kubenswrapper[4784]: E1205 12:45:50.304743 4784 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.151:5001/podified-master-centos10/openstack-horizon:watcher_latest"
Dec 05 12:45:50 crc kubenswrapper[4784]: E1205 12:45:50.304839 4784 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:38.102.83.151:5001/podified-master-centos10/openstack-horizon:watcher_latest,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ncfh56bhf8h77h54ch577h5chb7h5d9h5bbhbch57ch564h5d6h5cch5b7h65ch667h54dh685h67fhf9h559h87h58dh65bh59dhf4h644h577h649h558q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:yes,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4mkqw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-57f74fb67c-6g957_openstack(23e368b2-6047-40b1-afd5-7899c48c94ad): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 05 12:45:50 crc kubenswrapper[4784]: E1205 12:45:50.307701 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.151:5001/podified-master-centos10/openstack-horizon:watcher_latest\\\"\"]" pod="openstack/horizon-57f74fb67c-6g957" podUID="23e368b2-6047-40b1-afd5-7899c48c94ad"
Dec 05 12:45:50 crc kubenswrapper[4784]: E1205 12:45:50.806584 4784 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.151:5001/podified-master-centos10/openstack-watcher-decision-engine:watcher_latest"
Dec 05 12:45:50 crc kubenswrapper[4784]: E1205 12:45:50.806615 4784 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.151:5001/podified-master-centos10/openstack-watcher-decision-engine:watcher_latest"
Dec 05 12:45:50 crc kubenswrapper[4784]: E1205 12:45:50.806728 4784 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:watcher-decision-engine,Image:38.102.83.151:5001/podified-master-centos10/openstack-watcher-decision-engine:watcher_latest,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n596hf8h665hbdh674h5fbh584h667h599h579h56ch4h55bh94h59h5h58bh5d7h67fh594hc7h98h77hdh68fh545hc5h546h574h66h569h97q,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:watcher-decision-engine-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/watcher,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:custom-prometheus-ca,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/prometheus/ca.crt,SubPath:ca.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4xmx9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pgrep -f -r DRST watcher-decision-engine],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:30,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pgrep -f -r DRST watcher-decision-engine],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:30,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42451,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pgrep -f -r DRST watcher-decision-engine],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:6,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-decision-engine-0_openstack(31a07479-cab0-4561-b49b-73b1c3dad744): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 05 12:45:50 crc kubenswrapper[4784]: E1205 12:45:50.808136 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/watcher-decision-engine-0" podUID="31a07479-cab0-4561-b49b-73b1c3dad744"
Dec 05 12:45:50 crc kubenswrapper[4784]: I1205 12:45:50.907553 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-api-0"
Dec 05 12:45:50 crc kubenswrapper[4784]: I1205 12:45:50.914385 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-74474b4965-5h5n8"
Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.004940 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23695adb-c981-437e-b37d-cef86d57d515-config-data\") pod \"23695adb-c981-437e-b37d-cef86d57d515\" (UID: \"23695adb-c981-437e-b37d-cef86d57d515\") "
Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.005010 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6xkcl\" (UniqueName: \"kubernetes.io/projected/5d78607c-ec1e-429a-b458-217f7de86abc-kube-api-access-6xkcl\") pod \"5d78607c-ec1e-429a-b458-217f7de86abc\" (UID: \"5d78607c-ec1e-429a-b458-217f7de86abc\") "
Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.005159 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5d78607c-ec1e-429a-b458-217f7de86abc-scripts\") pod \"5d78607c-ec1e-429a-b458-217f7de86abc\" (UID: \"5d78607c-ec1e-429a-b458-217f7de86abc\") "
Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.005226 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23695adb-c981-437e-b37d-cef86d57d515-combined-ca-bundle\") pod \"23695adb-c981-437e-b37d-cef86d57d515\" (UID: \"23695adb-c981-437e-b37d-cef86d57d515\") "
Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.005276 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/23695adb-c981-437e-b37d-cef86d57d515-custom-prometheus-ca\") pod \"23695adb-c981-437e-b37d-cef86d57d515\" (UID: \"23695adb-c981-437e-b37d-cef86d57d515\") "
Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.005312 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5d78607c-ec1e-429a-b458-217f7de86abc-logs\") pod \"5d78607c-ec1e-429a-b458-217f7de86abc\" (UID: \"5d78607c-ec1e-429a-b458-217f7de86abc\") "
Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.005371 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5d78607c-ec1e-429a-b458-217f7de86abc-horizon-secret-key\") pod \"5d78607c-ec1e-429a-b458-217f7de86abc\" (UID: \"5d78607c-ec1e-429a-b458-217f7de86abc\") "
Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.005464 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5d78607c-ec1e-429a-b458-217f7de86abc-config-data\") pod \"5d78607c-ec1e-429a-b458-217f7de86abc\" (UID: \"5d78607c-ec1e-429a-b458-217f7de86abc\") "
Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.005498 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nscz6\" (UniqueName: \"kubernetes.io/projected/23695adb-c981-437e-b37d-cef86d57d515-kube-api-access-nscz6\") pod \"23695adb-c981-437e-b37d-cef86d57d515\" (UID: \"23695adb-c981-437e-b37d-cef86d57d515\") "
\"23695adb-c981-437e-b37d-cef86d57d515\" (UID: \"23695adb-c981-437e-b37d-cef86d57d515\") " Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.005518 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/23695adb-c981-437e-b37d-cef86d57d515-logs\") pod \"23695adb-c981-437e-b37d-cef86d57d515\" (UID: \"23695adb-c981-437e-b37d-cef86d57d515\") " Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.006580 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23695adb-c981-437e-b37d-cef86d57d515-logs" (OuterVolumeSpecName: "logs") pod "23695adb-c981-437e-b37d-cef86d57d515" (UID: "23695adb-c981-437e-b37d-cef86d57d515"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.007147 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d78607c-ec1e-429a-b458-217f7de86abc-config-data" (OuterVolumeSpecName: "config-data") pod "5d78607c-ec1e-429a-b458-217f7de86abc" (UID: "5d78607c-ec1e-429a-b458-217f7de86abc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.007651 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5d78607c-ec1e-429a-b458-217f7de86abc-logs" (OuterVolumeSpecName: "logs") pod "5d78607c-ec1e-429a-b458-217f7de86abc" (UID: "5d78607c-ec1e-429a-b458-217f7de86abc"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.007975 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d78607c-ec1e-429a-b458-217f7de86abc-scripts" (OuterVolumeSpecName: "scripts") pod "5d78607c-ec1e-429a-b458-217f7de86abc" (UID: "5d78607c-ec1e-429a-b458-217f7de86abc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.022308 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d78607c-ec1e-429a-b458-217f7de86abc-kube-api-access-6xkcl" (OuterVolumeSpecName: "kube-api-access-6xkcl") pod "5d78607c-ec1e-429a-b458-217f7de86abc" (UID: "5d78607c-ec1e-429a-b458-217f7de86abc"). InnerVolumeSpecName "kube-api-access-6xkcl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.022694 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d78607c-ec1e-429a-b458-217f7de86abc-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "5d78607c-ec1e-429a-b458-217f7de86abc" (UID: "5d78607c-ec1e-429a-b458-217f7de86abc"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.048438 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23695adb-c981-437e-b37d-cef86d57d515-kube-api-access-nscz6" (OuterVolumeSpecName: "kube-api-access-nscz6") pod "23695adb-c981-437e-b37d-cef86d57d515" (UID: "23695adb-c981-437e-b37d-cef86d57d515"). InnerVolumeSpecName "kube-api-access-nscz6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.076438 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23695adb-c981-437e-b37d-cef86d57d515-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "23695adb-c981-437e-b37d-cef86d57d515" (UID: "23695adb-c981-437e-b37d-cef86d57d515"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.088376 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23695adb-c981-437e-b37d-cef86d57d515-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "23695adb-c981-437e-b37d-cef86d57d515" (UID: "23695adb-c981-437e-b37d-cef86d57d515"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.109622 4784 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5d78607c-ec1e-429a-b458-217f7de86abc-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.110075 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5d78607c-ec1e-429a-b458-217f7de86abc-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.110919 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nscz6\" (UniqueName: \"kubernetes.io/projected/23695adb-c981-437e-b37d-cef86d57d515-kube-api-access-nscz6\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.110944 4784 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/23695adb-c981-437e-b37d-cef86d57d515-logs\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.110957 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6xkcl\" (UniqueName: \"kubernetes.io/projected/5d78607c-ec1e-429a-b458-217f7de86abc-kube-api-access-6xkcl\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.110969 4784 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5d78607c-ec1e-429a-b458-217f7de86abc-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.110980 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23695adb-c981-437e-b37d-cef86d57d515-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.110992 4784 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/23695adb-c981-437e-b37d-cef86d57d515-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.111001 4784 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5d78607c-ec1e-429a-b458-217f7de86abc-logs\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.121379 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/23695adb-c981-437e-b37d-cef86d57d515-config-data" (OuterVolumeSpecName: "config-data") pod "23695adb-c981-437e-b37d-cef86d57d515" (UID: "23695adb-c981-437e-b37d-cef86d57d515"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.212268 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23695adb-c981-437e-b37d-cef86d57d515-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.331869 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"23695adb-c981-437e-b37d-cef86d57d515","Type":"ContainerDied","Data":"1fbd721a50e421d6dd1bb22839a59d38d3aa6766f57ca15f1191a0d564e635df"} Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.331955 4784 scope.go:117] "RemoveContainer" containerID="91b4a91b5f7fffd06b49ced759e917b39298c704edd1d39156031be97bd54f9d" Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.331891 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-api-0" Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.334025 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-74474b4965-5h5n8" event={"ID":"5d78607c-ec1e-429a-b458-217f7de86abc","Type":"ContainerDied","Data":"ad3631de0d715ed39a2dab76ba4ebd62242cfc8060c71787c5bd1abe496e1f93"} Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.334091 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-74474b4965-5h5n8" Dec 05 12:45:51 crc kubenswrapper[4784]: E1205 12:45:51.335632 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.151:5001/podified-master-centos10/openstack-watcher-decision-engine:watcher_latest\\\"\"" pod="openstack/watcher-decision-engine-0" podUID="31a07479-cab0-4561-b49b-73b1c3dad744" Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.406677 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-api-0"] Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.436667 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-api-0"] Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.480246 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-api-0"] Dec 05 12:45:51 crc kubenswrapper[4784]: E1205 12:45:51.480775 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23695adb-c981-437e-b37d-cef86d57d515" containerName="watcher-api" Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.480808 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="23695adb-c981-437e-b37d-cef86d57d515" containerName="watcher-api" Dec 05 12:45:51 crc kubenswrapper[4784]: E1205 12:45:51.480831 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23695adb-c981-437e-b37d-cef86d57d515" containerName="watcher-api-log" Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.480841 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="23695adb-c981-437e-b37d-cef86d57d515" containerName="watcher-api-log" Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.481061 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="23695adb-c981-437e-b37d-cef86d57d515" containerName="watcher-api" Dec 05 
12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.481087 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="23695adb-c981-437e-b37d-cef86d57d515" containerName="watcher-api-log" Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.482109 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-api-0" Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.485110 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-api-config-data" Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.496758 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-74474b4965-5h5n8"] Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.509865 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-74474b4965-5h5n8"] Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.529511 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-api-0"] Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.530741 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3964e71e-ad0d-4f97-9458-bb1defbd3a47-config-data\") pod \"watcher-api-0\" (UID: \"3964e71e-ad0d-4f97-9458-bb1defbd3a47\") " pod="openstack/watcher-api-0" Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.530812 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxstq\" (UniqueName: \"kubernetes.io/projected/3964e71e-ad0d-4f97-9458-bb1defbd3a47-kube-api-access-hxstq\") pod \"watcher-api-0\" (UID: \"3964e71e-ad0d-4f97-9458-bb1defbd3a47\") " pod="openstack/watcher-api-0" Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.530866 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3964e71e-ad0d-4f97-9458-bb1defbd3a47-logs\") pod \"watcher-api-0\" (UID: \"3964e71e-ad0d-4f97-9458-bb1defbd3a47\") " pod="openstack/watcher-api-0" Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.530969 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3964e71e-ad0d-4f97-9458-bb1defbd3a47-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"3964e71e-ad0d-4f97-9458-bb1defbd3a47\") " pod="openstack/watcher-api-0" Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.531005 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/3964e71e-ad0d-4f97-9458-bb1defbd3a47-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"3964e71e-ad0d-4f97-9458-bb1defbd3a47\") " pod="openstack/watcher-api-0" Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.632482 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/3964e71e-ad0d-4f97-9458-bb1defbd3a47-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"3964e71e-ad0d-4f97-9458-bb1defbd3a47\") " pod="openstack/watcher-api-0" Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.633171 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3964e71e-ad0d-4f97-9458-bb1defbd3a47-config-data\") pod \"watcher-api-0\" (UID: 
\"3964e71e-ad0d-4f97-9458-bb1defbd3a47\") " pod="openstack/watcher-api-0" Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.633277 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hxstq\" (UniqueName: \"kubernetes.io/projected/3964e71e-ad0d-4f97-9458-bb1defbd3a47-kube-api-access-hxstq\") pod \"watcher-api-0\" (UID: \"3964e71e-ad0d-4f97-9458-bb1defbd3a47\") " pod="openstack/watcher-api-0" Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.633346 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3964e71e-ad0d-4f97-9458-bb1defbd3a47-logs\") pod \"watcher-api-0\" (UID: \"3964e71e-ad0d-4f97-9458-bb1defbd3a47\") " pod="openstack/watcher-api-0" Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.633533 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3964e71e-ad0d-4f97-9458-bb1defbd3a47-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"3964e71e-ad0d-4f97-9458-bb1defbd3a47\") " pod="openstack/watcher-api-0" Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.634678 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3964e71e-ad0d-4f97-9458-bb1defbd3a47-logs\") pod \"watcher-api-0\" (UID: \"3964e71e-ad0d-4f97-9458-bb1defbd3a47\") " pod="openstack/watcher-api-0" Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.637437 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3964e71e-ad0d-4f97-9458-bb1defbd3a47-config-data\") pod \"watcher-api-0\" (UID: \"3964e71e-ad0d-4f97-9458-bb1defbd3a47\") " pod="openstack/watcher-api-0" Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.638786 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/3964e71e-ad0d-4f97-9458-bb1defbd3a47-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"3964e71e-ad0d-4f97-9458-bb1defbd3a47\") " pod="openstack/watcher-api-0" Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.639827 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3964e71e-ad0d-4f97-9458-bb1defbd3a47-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"3964e71e-ad0d-4f97-9458-bb1defbd3a47\") " pod="openstack/watcher-api-0" Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.650112 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hxstq\" (UniqueName: \"kubernetes.io/projected/3964e71e-ad0d-4f97-9458-bb1defbd3a47-kube-api-access-hxstq\") pod \"watcher-api-0\" (UID: \"3964e71e-ad0d-4f97-9458-bb1defbd3a47\") " pod="openstack/watcher-api-0" Dec 05 12:45:51 crc kubenswrapper[4784]: I1205 12:45:51.812887 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-api-0" Dec 05 12:45:53 crc kubenswrapper[4784]: I1205 12:45:53.009418 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23695adb-c981-437e-b37d-cef86d57d515" path="/var/lib/kubelet/pods/23695adb-c981-437e-b37d-cef86d57d515/volumes" Dec 05 12:45:53 crc kubenswrapper[4784]: I1205 12:45:53.010704 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d78607c-ec1e-429a-b458-217f7de86abc" path="/var/lib/kubelet/pods/5d78607c-ec1e-429a-b458-217f7de86abc/volumes" Dec 05 12:45:54 crc kubenswrapper[4784]: E1205 12:45:54.965218 4784 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of cdf80954ad7cdb8479125a839315ea2440a4b028301002e1b7288c6041aa30a1 is running failed: container process not found" containerID="cdf80954ad7cdb8479125a839315ea2440a4b028301002e1b7288c6041aa30a1" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Dec 05 12:45:54 crc kubenswrapper[4784]: E1205 12:45:54.966014 4784 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of cdf80954ad7cdb8479125a839315ea2440a4b028301002e1b7288c6041aa30a1 is running failed: container process not found" containerID="cdf80954ad7cdb8479125a839315ea2440a4b028301002e1b7288c6041aa30a1" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Dec 05 12:45:54 crc kubenswrapper[4784]: E1205 12:45:54.966278 4784 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of cdf80954ad7cdb8479125a839315ea2440a4b028301002e1b7288c6041aa30a1 is running failed: container process not found" containerID="cdf80954ad7cdb8479125a839315ea2440a4b028301002e1b7288c6041aa30a1" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Dec 05 12:45:54 crc kubenswrapper[4784]: E1205 12:45:54.966308 4784 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of cdf80954ad7cdb8479125a839315ea2440a4b028301002e1b7288c6041aa30a1 is running failed: container process not found" probeType="Readiness" pod="openstack/watcher-applier-0" podUID="1b372793-eadc-4593-af62-97e7d647c76d" containerName="watcher-applier" Dec 05 12:45:56 crc kubenswrapper[4784]: I1205 12:45:56.383363 4784 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5dd44fd6df-gsqs9" podUID="bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.120:5353: i/o timeout" Dec 05 12:45:56 crc kubenswrapper[4784]: I1205 12:45:56.383599 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5dd44fd6df-gsqs9" Dec 05 12:45:58 crc kubenswrapper[4784]: I1205 12:45:58.403655 4784 generic.go:334] "Generic (PLEG): container finished" podID="7216e2bb-f775-4b6b-9f34-b966f26f4002" containerID="cef934a659ba928393f7fd2f3159621d3c13b65bcacd8f43dbaa52033ec0dda7" exitCode=0 Dec 05 12:45:58 crc kubenswrapper[4784]: I1205 12:45:58.403892 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-vt6gx" event={"ID":"7216e2bb-f775-4b6b-9f34-b966f26f4002","Type":"ContainerDied","Data":"cef934a659ba928393f7fd2f3159621d3c13b65bcacd8f43dbaa52033ec0dda7"} Dec 05 12:45:59 crc kubenswrapper[4784]: E1205 12:45:59.965903 4784 log.go:32] "ExecSync cmd from runtime service failed" 
err="rpc error: code = NotFound desc = container is not created or running: checking if PID of cdf80954ad7cdb8479125a839315ea2440a4b028301002e1b7288c6041aa30a1 is running failed: container process not found" containerID="cdf80954ad7cdb8479125a839315ea2440a4b028301002e1b7288c6041aa30a1" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Dec 05 12:45:59 crc kubenswrapper[4784]: E1205 12:45:59.966631 4784 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of cdf80954ad7cdb8479125a839315ea2440a4b028301002e1b7288c6041aa30a1 is running failed: container process not found" containerID="cdf80954ad7cdb8479125a839315ea2440a4b028301002e1b7288c6041aa30a1" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Dec 05 12:45:59 crc kubenswrapper[4784]: E1205 12:45:59.966941 4784 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of cdf80954ad7cdb8479125a839315ea2440a4b028301002e1b7288c6041aa30a1 is running failed: container process not found" containerID="cdf80954ad7cdb8479125a839315ea2440a4b028301002e1b7288c6041aa30a1" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Dec 05 12:45:59 crc kubenswrapper[4784]: E1205 12:45:59.966969 4784 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of cdf80954ad7cdb8479125a839315ea2440a4b028301002e1b7288c6041aa30a1 is running failed: container process not found" probeType="Readiness" pod="openstack/watcher-applier-0" podUID="1b372793-eadc-4593-af62-97e7d647c76d" containerName="watcher-applier" Dec 05 12:46:01 crc kubenswrapper[4784]: I1205 12:46:01.383815 4784 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5dd44fd6df-gsqs9" podUID="bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.120:5353: i/o timeout" Dec 05 12:46:04 crc kubenswrapper[4784]: E1205 12:46:04.965450 4784 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of cdf80954ad7cdb8479125a839315ea2440a4b028301002e1b7288c6041aa30a1 is running failed: container process not found" containerID="cdf80954ad7cdb8479125a839315ea2440a4b028301002e1b7288c6041aa30a1" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Dec 05 12:46:04 crc kubenswrapper[4784]: E1205 12:46:04.966551 4784 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of cdf80954ad7cdb8479125a839315ea2440a4b028301002e1b7288c6041aa30a1 is running failed: container process not found" containerID="cdf80954ad7cdb8479125a839315ea2440a4b028301002e1b7288c6041aa30a1" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Dec 05 12:46:04 crc kubenswrapper[4784]: E1205 12:46:04.967017 4784 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of cdf80954ad7cdb8479125a839315ea2440a4b028301002e1b7288c6041aa30a1 is running failed: container process not found" containerID="cdf80954ad7cdb8479125a839315ea2440a4b028301002e1b7288c6041aa30a1" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Dec 05 12:46:04 crc kubenswrapper[4784]: E1205 12:46:04.967107 4784 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is 
not created or running: checking if PID of cdf80954ad7cdb8479125a839315ea2440a4b028301002e1b7288c6041aa30a1 is running failed: container process not found" probeType="Readiness" pod="openstack/watcher-applier-0" podUID="1b372793-eadc-4593-af62-97e7d647c76d" containerName="watcher-applier" Dec 05 12:46:06 crc kubenswrapper[4784]: I1205 12:46:06.385313 4784 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5dd44fd6df-gsqs9" podUID="bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.120:5353: i/o timeout" Dec 05 12:46:06 crc kubenswrapper[4784]: I1205 12:46:06.842112 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-xcs6s" Dec 05 12:46:06 crc kubenswrapper[4784]: I1205 12:46:06.866213 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5dd44fd6df-gsqs9" Dec 05 12:46:06 crc kubenswrapper[4784]: I1205 12:46:06.931003 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a94ed534-bf7b-4fdc-9c79-0fa4425cb785-combined-ca-bundle\") pod \"a94ed534-bf7b-4fdc-9c79-0fa4425cb785\" (UID: \"a94ed534-bf7b-4fdc-9c79-0fa4425cb785\") " Dec 05 12:46:06 crc kubenswrapper[4784]: I1205 12:46:06.931119 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d-dns-svc\") pod \"bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d\" (UID: \"bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d\") " Dec 05 12:46:06 crc kubenswrapper[4784]: I1205 12:46:06.931165 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a94ed534-bf7b-4fdc-9c79-0fa4425cb785-config-data\") pod \"a94ed534-bf7b-4fdc-9c79-0fa4425cb785\" (UID: \"a94ed534-bf7b-4fdc-9c79-0fa4425cb785\") " Dec 05 12:46:06 crc kubenswrapper[4784]: I1205 12:46:06.931258 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xwcrf\" (UniqueName: \"kubernetes.io/projected/a94ed534-bf7b-4fdc-9c79-0fa4425cb785-kube-api-access-xwcrf\") pod \"a94ed534-bf7b-4fdc-9c79-0fa4425cb785\" (UID: \"a94ed534-bf7b-4fdc-9c79-0fa4425cb785\") " Dec 05 12:46:06 crc kubenswrapper[4784]: I1205 12:46:06.931858 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d-ovsdbserver-nb\") pod \"bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d\" (UID: \"bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d\") " Dec 05 12:46:06 crc kubenswrapper[4784]: I1205 12:46:06.931894 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a94ed534-bf7b-4fdc-9c79-0fa4425cb785-db-sync-config-data\") pod \"a94ed534-bf7b-4fdc-9c79-0fa4425cb785\" (UID: \"a94ed534-bf7b-4fdc-9c79-0fa4425cb785\") " Dec 05 12:46:06 crc kubenswrapper[4784]: I1205 12:46:06.931922 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6gsf4\" (UniqueName: \"kubernetes.io/projected/bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d-kube-api-access-6gsf4\") pod \"bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d\" (UID: \"bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d\") " Dec 05 12:46:06 crc kubenswrapper[4784]: I1205 12:46:06.931939 4784 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d-ovsdbserver-sb\") pod \"bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d\" (UID: \"bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d\") " Dec 05 12:46:06 crc kubenswrapper[4784]: I1205 12:46:06.931975 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d-config\") pod \"bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d\" (UID: \"bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d\") " Dec 05 12:46:06 crc kubenswrapper[4784]: I1205 12:46:06.936155 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a94ed534-bf7b-4fdc-9c79-0fa4425cb785-kube-api-access-xwcrf" (OuterVolumeSpecName: "kube-api-access-xwcrf") pod "a94ed534-bf7b-4fdc-9c79-0fa4425cb785" (UID: "a94ed534-bf7b-4fdc-9c79-0fa4425cb785"). InnerVolumeSpecName "kube-api-access-xwcrf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:46:06 crc kubenswrapper[4784]: I1205 12:46:06.963571 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a94ed534-bf7b-4fdc-9c79-0fa4425cb785-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "a94ed534-bf7b-4fdc-9c79-0fa4425cb785" (UID: "a94ed534-bf7b-4fdc-9c79-0fa4425cb785"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:46:06 crc kubenswrapper[4784]: I1205 12:46:06.965844 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d-kube-api-access-6gsf4" (OuterVolumeSpecName: "kube-api-access-6gsf4") pod "bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d" (UID: "bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d"). InnerVolumeSpecName "kube-api-access-6gsf4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:46:06 crc kubenswrapper[4784]: I1205 12:46:06.967204 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a94ed534-bf7b-4fdc-9c79-0fa4425cb785-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a94ed534-bf7b-4fdc-9c79-0fa4425cb785" (UID: "a94ed534-bf7b-4fdc-9c79-0fa4425cb785"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.001386 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d" (UID: "bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.004782 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d" (UID: "bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.030837 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d-config" (OuterVolumeSpecName: "config") pod "bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d" (UID: "bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.034899 4784 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.034969 4784 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a94ed534-bf7b-4fdc-9c79-0fa4425cb785-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.034982 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6gsf4\" (UniqueName: \"kubernetes.io/projected/bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d-kube-api-access-6gsf4\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.034996 4784 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.035008 4784 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.035021 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a94ed534-bf7b-4fdc-9c79-0fa4425cb785-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.036357 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xwcrf\" (UniqueName: \"kubernetes.io/projected/a94ed534-bf7b-4fdc-9c79-0fa4425cb785-kube-api-access-xwcrf\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.057360 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d" (UID: "bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.060881 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a94ed534-bf7b-4fdc-9c79-0fa4425cb785-config-data" (OuterVolumeSpecName: "config-data") pod "a94ed534-bf7b-4fdc-9c79-0fa4425cb785" (UID: "a94ed534-bf7b-4fdc-9c79-0fa4425cb785"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.139568 4784 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.139614 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a94ed534-bf7b-4fdc-9c79-0fa4425cb785-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.377923 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5fd4688c89-6ndnj" Dec 05 12:46:07 crc kubenswrapper[4784]: E1205 12:46:07.386163 4784 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.151:5001/podified-master-centos10/openstack-barbican-api:watcher_latest" Dec 05 12:46:07 crc kubenswrapper[4784]: E1205 12:46:07.386227 4784 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.151:5001/podified-master-centos10/openstack-barbican-api:watcher_latest" Dec 05 12:46:07 crc kubenswrapper[4784]: E1205 12:46:07.386346 4784 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:38.102.83.151:5001/podified-master-centos10/openstack-barbican-api:watcher_latest,Command:[/bin/bash],Args:[-c barbican-manage db upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-x9h8j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-bwtqt_openstack(c921ceb1-e577-4b4a-be99-3544491930d3): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 12:46:07 crc kubenswrapper[4784]: E1205 12:46:07.387573 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" 
pod="openstack/barbican-db-sync-bwtqt" podUID="c921ceb1-e577-4b4a-be99-3544491930d3" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.408892 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-vt6gx" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.415658 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-57f74fb67c-6g957" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.443830 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/23e368b2-6047-40b1-afd5-7899c48c94ad-config-data\") pod \"23e368b2-6047-40b1-afd5-7899c48c94ad\" (UID: \"23e368b2-6047-40b1-afd5-7899c48c94ad\") " Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.443977 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s5sjg\" (UniqueName: \"kubernetes.io/projected/db3d333b-cc81-49d9-a8bf-1bdad94739c2-kube-api-access-s5sjg\") pod \"db3d333b-cc81-49d9-a8bf-1bdad94739c2\" (UID: \"db3d333b-cc81-49d9-a8bf-1bdad94739c2\") " Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.444003 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4mkqw\" (UniqueName: \"kubernetes.io/projected/23e368b2-6047-40b1-afd5-7899c48c94ad-kube-api-access-4mkqw\") pod \"23e368b2-6047-40b1-afd5-7899c48c94ad\" (UID: \"23e368b2-6047-40b1-afd5-7899c48c94ad\") " Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.444020 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/db3d333b-cc81-49d9-a8bf-1bdad94739c2-scripts\") pod \"db3d333b-cc81-49d9-a8bf-1bdad94739c2\" (UID: \"db3d333b-cc81-49d9-a8bf-1bdad94739c2\") " Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.444051 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/23e368b2-6047-40b1-afd5-7899c48c94ad-scripts\") pod \"23e368b2-6047-40b1-afd5-7899c48c94ad\" (UID: \"23e368b2-6047-40b1-afd5-7899c48c94ad\") " Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.444068 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/db3d333b-cc81-49d9-a8bf-1bdad94739c2-config-data\") pod \"db3d333b-cc81-49d9-a8bf-1bdad94739c2\" (UID: \"db3d333b-cc81-49d9-a8bf-1bdad94739c2\") " Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.444087 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/23e368b2-6047-40b1-afd5-7899c48c94ad-logs\") pod \"23e368b2-6047-40b1-afd5-7899c48c94ad\" (UID: \"23e368b2-6047-40b1-afd5-7899c48c94ad\") " Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.444122 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7216e2bb-f775-4b6b-9f34-b966f26f4002-combined-ca-bundle\") pod \"7216e2bb-f775-4b6b-9f34-b966f26f4002\" (UID: \"7216e2bb-f775-4b6b-9f34-b966f26f4002\") " Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.444163 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/db3d333b-cc81-49d9-a8bf-1bdad94739c2-horizon-secret-key\") pod 
\"db3d333b-cc81-49d9-a8bf-1bdad94739c2\" (UID: \"db3d333b-cc81-49d9-a8bf-1bdad94739c2\") " Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.444224 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/db3d333b-cc81-49d9-a8bf-1bdad94739c2-logs\") pod \"db3d333b-cc81-49d9-a8bf-1bdad94739c2\" (UID: \"db3d333b-cc81-49d9-a8bf-1bdad94739c2\") " Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.444244 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/23e368b2-6047-40b1-afd5-7899c48c94ad-horizon-secret-key\") pod \"23e368b2-6047-40b1-afd5-7899c48c94ad\" (UID: \"23e368b2-6047-40b1-afd5-7899c48c94ad\") " Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.444273 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5nzbd\" (UniqueName: \"kubernetes.io/projected/7216e2bb-f775-4b6b-9f34-b966f26f4002-kube-api-access-5nzbd\") pod \"7216e2bb-f775-4b6b-9f34-b966f26f4002\" (UID: \"7216e2bb-f775-4b6b-9f34-b966f26f4002\") " Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.444290 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/7216e2bb-f775-4b6b-9f34-b966f26f4002-config\") pod \"7216e2bb-f775-4b6b-9f34-b966f26f4002\" (UID: \"7216e2bb-f775-4b6b-9f34-b966f26f4002\") " Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.446260 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db3d333b-cc81-49d9-a8bf-1bdad94739c2-scripts" (OuterVolumeSpecName: "scripts") pod "db3d333b-cc81-49d9-a8bf-1bdad94739c2" (UID: "db3d333b-cc81-49d9-a8bf-1bdad94739c2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.447110 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23e368b2-6047-40b1-afd5-7899c48c94ad-logs" (OuterVolumeSpecName: "logs") pod "23e368b2-6047-40b1-afd5-7899c48c94ad" (UID: "23e368b2-6047-40b1-afd5-7899c48c94ad"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.449053 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/23e368b2-6047-40b1-afd5-7899c48c94ad-scripts" (OuterVolumeSpecName: "scripts") pod "23e368b2-6047-40b1-afd5-7899c48c94ad" (UID: "23e368b2-6047-40b1-afd5-7899c48c94ad"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.450861 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db3d333b-cc81-49d9-a8bf-1bdad94739c2-config-data" (OuterVolumeSpecName: "config-data") pod "db3d333b-cc81-49d9-a8bf-1bdad94739c2" (UID: "db3d333b-cc81-49d9-a8bf-1bdad94739c2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.451574 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/23e368b2-6047-40b1-afd5-7899c48c94ad-config-data" (OuterVolumeSpecName: "config-data") pod "23e368b2-6047-40b1-afd5-7899c48c94ad" (UID: "23e368b2-6047-40b1-afd5-7899c48c94ad"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.451568 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db3d333b-cc81-49d9-a8bf-1bdad94739c2-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "db3d333b-cc81-49d9-a8bf-1bdad94739c2" (UID: "db3d333b-cc81-49d9-a8bf-1bdad94739c2"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.452458 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23e368b2-6047-40b1-afd5-7899c48c94ad-kube-api-access-4mkqw" (OuterVolumeSpecName: "kube-api-access-4mkqw") pod "23e368b2-6047-40b1-afd5-7899c48c94ad" (UID: "23e368b2-6047-40b1-afd5-7899c48c94ad"). InnerVolumeSpecName "kube-api-access-4mkqw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.455218 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23e368b2-6047-40b1-afd5-7899c48c94ad-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "23e368b2-6047-40b1-afd5-7899c48c94ad" (UID: "23e368b2-6047-40b1-afd5-7899c48c94ad"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.456388 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7216e2bb-f775-4b6b-9f34-b966f26f4002-kube-api-access-5nzbd" (OuterVolumeSpecName: "kube-api-access-5nzbd") pod "7216e2bb-f775-4b6b-9f34-b966f26f4002" (UID: "7216e2bb-f775-4b6b-9f34-b966f26f4002"). InnerVolumeSpecName "kube-api-access-5nzbd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.457156 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db3d333b-cc81-49d9-a8bf-1bdad94739c2-kube-api-access-s5sjg" (OuterVolumeSpecName: "kube-api-access-s5sjg") pod "db3d333b-cc81-49d9-a8bf-1bdad94739c2" (UID: "db3d333b-cc81-49d9-a8bf-1bdad94739c2"). InnerVolumeSpecName "kube-api-access-s5sjg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.459130 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/db3d333b-cc81-49d9-a8bf-1bdad94739c2-logs" (OuterVolumeSpecName: "logs") pod "db3d333b-cc81-49d9-a8bf-1bdad94739c2" (UID: "db3d333b-cc81-49d9-a8bf-1bdad94739c2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.504714 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-xcs6s" event={"ID":"a94ed534-bf7b-4fdc-9c79-0fa4425cb785","Type":"ContainerDied","Data":"f374ed3f56ff8b358eea9b4fee8a12c667922581c6194a3822342ea51b02bc0e"} Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.504757 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f374ed3f56ff8b358eea9b4fee8a12c667922581c6194a3822342ea51b02bc0e" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.504850 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-xcs6s" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.510158 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-vt6gx" event={"ID":"7216e2bb-f775-4b6b-9f34-b966f26f4002","Type":"ContainerDied","Data":"30cc5752d81ed81ada223d3294c06076b75b53e26f10b2322dab43b956c7822a"} Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.510205 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="30cc5752d81ed81ada223d3294c06076b75b53e26f10b2322dab43b956c7822a" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.510274 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-vt6gx" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.513274 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-57f74fb67c-6g957" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.513628 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-57f74fb67c-6g957" event={"ID":"23e368b2-6047-40b1-afd5-7899c48c94ad","Type":"ContainerDied","Data":"f1a2583d87c388c0ffd85a331ab1748eda2988800ff274eb5bd4ddee32e4161f"} Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.514204 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7216e2bb-f775-4b6b-9f34-b966f26f4002-config" (OuterVolumeSpecName: "config") pod "7216e2bb-f775-4b6b-9f34-b966f26f4002" (UID: "7216e2bb-f775-4b6b-9f34-b966f26f4002"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.519492 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7216e2bb-f775-4b6b-9f34-b966f26f4002-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7216e2bb-f775-4b6b-9f34-b966f26f4002" (UID: "7216e2bb-f775-4b6b-9f34-b966f26f4002"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.521688 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5fd4688c89-6ndnj" event={"ID":"db3d333b-cc81-49d9-a8bf-1bdad94739c2","Type":"ContainerDied","Data":"a72c05a16be9268882efca36976bbc2f5d1041de56488fe7181a5dba0cad8c59"} Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.521703 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5fd4688c89-6ndnj" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.525363 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5dd44fd6df-gsqs9" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.525944 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5dd44fd6df-gsqs9" event={"ID":"bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d","Type":"ContainerDied","Data":"b3f67aa82ef88bb5ec297d2ee4b30ced67c11c89059264421c84863f5f2e9e45"} Dec 05 12:46:07 crc kubenswrapper[4784]: E1205 12:46:07.543731 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.151:5001/podified-master-centos10/openstack-barbican-api:watcher_latest\\\"\"" pod="openstack/barbican-db-sync-bwtqt" podUID="c921ceb1-e577-4b4a-be99-3544491930d3" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.546043 4784 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/db3d333b-cc81-49d9-a8bf-1bdad94739c2-logs\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.546072 4784 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/23e368b2-6047-40b1-afd5-7899c48c94ad-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.546086 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5nzbd\" (UniqueName: \"kubernetes.io/projected/7216e2bb-f775-4b6b-9f34-b966f26f4002-kube-api-access-5nzbd\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.546100 4784 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/7216e2bb-f775-4b6b-9f34-b966f26f4002-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.546111 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/23e368b2-6047-40b1-afd5-7899c48c94ad-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.546123 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s5sjg\" (UniqueName: \"kubernetes.io/projected/db3d333b-cc81-49d9-a8bf-1bdad94739c2-kube-api-access-s5sjg\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.546134 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4mkqw\" (UniqueName: \"kubernetes.io/projected/23e368b2-6047-40b1-afd5-7899c48c94ad-kube-api-access-4mkqw\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.546145 4784 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/db3d333b-cc81-49d9-a8bf-1bdad94739c2-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.546157 4784 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/23e368b2-6047-40b1-afd5-7899c48c94ad-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.546167 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/db3d333b-cc81-49d9-a8bf-1bdad94739c2-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.546177 4784 reconciler_common.go:293] "Volume detached for 
volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/23e368b2-6047-40b1-afd5-7899c48c94ad-logs\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.546204 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7216e2bb-f775-4b6b-9f34-b966f26f4002-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.546216 4784 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/db3d333b-cc81-49d9-a8bf-1bdad94739c2-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.616221 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-57f74fb67c-6g957"] Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.627084 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-57f74fb67c-6g957"] Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.655574 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5fd4688c89-6ndnj"] Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.662553 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-5fd4688c89-6ndnj"] Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.669441 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5dd44fd6df-gsqs9"] Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.676017 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5dd44fd6df-gsqs9"] Dec 05 12:46:07 crc kubenswrapper[4784]: I1205 12:46:07.818105 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7cdb6b7d4-mvtql"] Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.274585 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-667d465cbf-r6dqc"] Dec 05 12:46:08 crc kubenswrapper[4784]: E1205 12:46:08.275381 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d" containerName="init" Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.275406 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d" containerName="init" Dec 05 12:46:08 crc kubenswrapper[4784]: E1205 12:46:08.275420 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7216e2bb-f775-4b6b-9f34-b966f26f4002" containerName="neutron-db-sync" Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.275428 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="7216e2bb-f775-4b6b-9f34-b966f26f4002" containerName="neutron-db-sync" Dec 05 12:46:08 crc kubenswrapper[4784]: E1205 12:46:08.275447 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a94ed534-bf7b-4fdc-9c79-0fa4425cb785" containerName="glance-db-sync" Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.275455 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="a94ed534-bf7b-4fdc-9c79-0fa4425cb785" containerName="glance-db-sync" Dec 05 12:46:08 crc kubenswrapper[4784]: E1205 12:46:08.275488 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d" containerName="dnsmasq-dns" Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.275496 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d" containerName="dnsmasq-dns" Dec 05 12:46:08 crc 
Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.275757 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d" containerName="dnsmasq-dns"
Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.275771 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="a94ed534-bf7b-4fdc-9c79-0fa4425cb785" containerName="glance-db-sync"
Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.278469 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-667d465cbf-r6dqc"
Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.298527 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-667d465cbf-r6dqc"]
Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.361733 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7fad5aee-990f-4c1b-8610-db4cb774dcf7-dns-svc\") pod \"dnsmasq-dns-667d465cbf-r6dqc\" (UID: \"7fad5aee-990f-4c1b-8610-db4cb774dcf7\") " pod="openstack/dnsmasq-dns-667d465cbf-r6dqc"
Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.361787 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8lxp6\" (UniqueName: \"kubernetes.io/projected/7fad5aee-990f-4c1b-8610-db4cb774dcf7-kube-api-access-8lxp6\") pod \"dnsmasq-dns-667d465cbf-r6dqc\" (UID: \"7fad5aee-990f-4c1b-8610-db4cb774dcf7\") " pod="openstack/dnsmasq-dns-667d465cbf-r6dqc"
Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.361846 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7fad5aee-990f-4c1b-8610-db4cb774dcf7-config\") pod \"dnsmasq-dns-667d465cbf-r6dqc\" (UID: \"7fad5aee-990f-4c1b-8610-db4cb774dcf7\") " pod="openstack/dnsmasq-dns-667d465cbf-r6dqc"
Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.361892 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7fad5aee-990f-4c1b-8610-db4cb774dcf7-ovsdbserver-sb\") pod \"dnsmasq-dns-667d465cbf-r6dqc\" (UID: \"7fad5aee-990f-4c1b-8610-db4cb774dcf7\") " pod="openstack/dnsmasq-dns-667d465cbf-r6dqc"
Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.361912 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7fad5aee-990f-4c1b-8610-db4cb774dcf7-ovsdbserver-nb\") pod \"dnsmasq-dns-667d465cbf-r6dqc\" (UID: \"7fad5aee-990f-4c1b-8610-db4cb774dcf7\") " pod="openstack/dnsmasq-dns-667d465cbf-r6dqc"
Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.463115 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7fad5aee-990f-4c1b-8610-db4cb774dcf7-dns-svc\") pod \"dnsmasq-dns-667d465cbf-r6dqc\" (UID: \"7fad5aee-990f-4c1b-8610-db4cb774dcf7\") " pod="openstack/dnsmasq-dns-667d465cbf-r6dqc"
Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.463165 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8lxp6\" (UniqueName: \"kubernetes.io/projected/7fad5aee-990f-4c1b-8610-db4cb774dcf7-kube-api-access-8lxp6\") pod \"dnsmasq-dns-667d465cbf-r6dqc\" (UID: \"7fad5aee-990f-4c1b-8610-db4cb774dcf7\") " pod="openstack/dnsmasq-dns-667d465cbf-r6dqc"
Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.463235 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7fad5aee-990f-4c1b-8610-db4cb774dcf7-config\") pod \"dnsmasq-dns-667d465cbf-r6dqc\" (UID: \"7fad5aee-990f-4c1b-8610-db4cb774dcf7\") " pod="openstack/dnsmasq-dns-667d465cbf-r6dqc"
Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.463281 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7fad5aee-990f-4c1b-8610-db4cb774dcf7-ovsdbserver-sb\") pod \"dnsmasq-dns-667d465cbf-r6dqc\" (UID: \"7fad5aee-990f-4c1b-8610-db4cb774dcf7\") " pod="openstack/dnsmasq-dns-667d465cbf-r6dqc"
Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.463304 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7fad5aee-990f-4c1b-8610-db4cb774dcf7-ovsdbserver-nb\") pod \"dnsmasq-dns-667d465cbf-r6dqc\" (UID: \"7fad5aee-990f-4c1b-8610-db4cb774dcf7\") " pod="openstack/dnsmasq-dns-667d465cbf-r6dqc"
Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.464010 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7fad5aee-990f-4c1b-8610-db4cb774dcf7-ovsdbserver-nb\") pod \"dnsmasq-dns-667d465cbf-r6dqc\" (UID: \"7fad5aee-990f-4c1b-8610-db4cb774dcf7\") " pod="openstack/dnsmasq-dns-667d465cbf-r6dqc"
Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.464032 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7fad5aee-990f-4c1b-8610-db4cb774dcf7-dns-svc\") pod \"dnsmasq-dns-667d465cbf-r6dqc\" (UID: \"7fad5aee-990f-4c1b-8610-db4cb774dcf7\") " pod="openstack/dnsmasq-dns-667d465cbf-r6dqc"
Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.464683 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7fad5aee-990f-4c1b-8610-db4cb774dcf7-config\") pod \"dnsmasq-dns-667d465cbf-r6dqc\" (UID: \"7fad5aee-990f-4c1b-8610-db4cb774dcf7\") " pod="openstack/dnsmasq-dns-667d465cbf-r6dqc"
Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.464780 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7fad5aee-990f-4c1b-8610-db4cb774dcf7-ovsdbserver-sb\") pod \"dnsmasq-dns-667d465cbf-r6dqc\" (UID: \"7fad5aee-990f-4c1b-8610-db4cb774dcf7\") " pod="openstack/dnsmasq-dns-667d465cbf-r6dqc"
Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.493323 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8lxp6\" (UniqueName: \"kubernetes.io/projected/7fad5aee-990f-4c1b-8610-db4cb774dcf7-kube-api-access-8lxp6\") pod \"dnsmasq-dns-667d465cbf-r6dqc\" (UID: \"7fad5aee-990f-4c1b-8610-db4cb774dcf7\") " pod="openstack/dnsmasq-dns-667d465cbf-r6dqc"
Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.600363 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-667d465cbf-r6dqc"
Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.781070 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-667d465cbf-r6dqc"]
Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.816934 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7f66b8c67-hfrbq"]
Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.819394 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f66b8c67-hfrbq"
Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.874087 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1e1035a9-06b4-47c8-8781-e46a35a2f3c9-ovsdbserver-nb\") pod \"dnsmasq-dns-7f66b8c67-hfrbq\" (UID: \"1e1035a9-06b4-47c8-8781-e46a35a2f3c9\") " pod="openstack/dnsmasq-dns-7f66b8c67-hfrbq"
Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.874161 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e1035a9-06b4-47c8-8781-e46a35a2f3c9-config\") pod \"dnsmasq-dns-7f66b8c67-hfrbq\" (UID: \"1e1035a9-06b4-47c8-8781-e46a35a2f3c9\") " pod="openstack/dnsmasq-dns-7f66b8c67-hfrbq"
Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.874295 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1e1035a9-06b4-47c8-8781-e46a35a2f3c9-dns-svc\") pod \"dnsmasq-dns-7f66b8c67-hfrbq\" (UID: \"1e1035a9-06b4-47c8-8781-e46a35a2f3c9\") " pod="openstack/dnsmasq-dns-7f66b8c67-hfrbq"
Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.874370 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v69s6\" (UniqueName: \"kubernetes.io/projected/1e1035a9-06b4-47c8-8781-e46a35a2f3c9-kube-api-access-v69s6\") pod \"dnsmasq-dns-7f66b8c67-hfrbq\" (UID: \"1e1035a9-06b4-47c8-8781-e46a35a2f3c9\") " pod="openstack/dnsmasq-dns-7f66b8c67-hfrbq"
Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.874387 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1e1035a9-06b4-47c8-8781-e46a35a2f3c9-ovsdbserver-sb\") pod \"dnsmasq-dns-7f66b8c67-hfrbq\" (UID: \"1e1035a9-06b4-47c8-8781-e46a35a2f3c9\") " pod="openstack/dnsmasq-dns-7f66b8c67-hfrbq"
Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.891074 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-56d56698b8-d88q6"]
Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.893644 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-56d56698b8-d88q6"
Need to start a new one" pod="openstack/neutron-56d56698b8-d88q6" Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.895061 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.900690 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.901200 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.901308 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-bpx25" Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.911689 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7f66b8c67-hfrbq"] Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.938464 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-56d56698b8-d88q6"] Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.975533 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1e1035a9-06b4-47c8-8781-e46a35a2f3c9-ovsdbserver-nb\") pod \"dnsmasq-dns-7f66b8c67-hfrbq\" (UID: \"1e1035a9-06b4-47c8-8781-e46a35a2f3c9\") " pod="openstack/dnsmasq-dns-7f66b8c67-hfrbq" Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.975587 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11bdb484-f2ce-4363-a365-c6fa7a15d4ad-combined-ca-bundle\") pod \"neutron-56d56698b8-d88q6\" (UID: \"11bdb484-f2ce-4363-a365-c6fa7a15d4ad\") " pod="openstack/neutron-56d56698b8-d88q6" Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.975610 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/11bdb484-f2ce-4363-a365-c6fa7a15d4ad-config\") pod \"neutron-56d56698b8-d88q6\" (UID: \"11bdb484-f2ce-4363-a365-c6fa7a15d4ad\") " pod="openstack/neutron-56d56698b8-d88q6" Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.975645 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e1035a9-06b4-47c8-8781-e46a35a2f3c9-config\") pod \"dnsmasq-dns-7f66b8c67-hfrbq\" (UID: \"1e1035a9-06b4-47c8-8781-e46a35a2f3c9\") " pod="openstack/dnsmasq-dns-7f66b8c67-hfrbq" Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.975675 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1e1035a9-06b4-47c8-8781-e46a35a2f3c9-dns-svc\") pod \"dnsmasq-dns-7f66b8c67-hfrbq\" (UID: \"1e1035a9-06b4-47c8-8781-e46a35a2f3c9\") " pod="openstack/dnsmasq-dns-7f66b8c67-hfrbq" Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.975720 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/11bdb484-f2ce-4363-a365-c6fa7a15d4ad-ovndb-tls-certs\") pod \"neutron-56d56698b8-d88q6\" (UID: \"11bdb484-f2ce-4363-a365-c6fa7a15d4ad\") " pod="openstack/neutron-56d56698b8-d88q6" Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.975743 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-q9x6d\" (UniqueName: \"kubernetes.io/projected/11bdb484-f2ce-4363-a365-c6fa7a15d4ad-kube-api-access-q9x6d\") pod \"neutron-56d56698b8-d88q6\" (UID: \"11bdb484-f2ce-4363-a365-c6fa7a15d4ad\") " pod="openstack/neutron-56d56698b8-d88q6" Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.975767 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v69s6\" (UniqueName: \"kubernetes.io/projected/1e1035a9-06b4-47c8-8781-e46a35a2f3c9-kube-api-access-v69s6\") pod \"dnsmasq-dns-7f66b8c67-hfrbq\" (UID: \"1e1035a9-06b4-47c8-8781-e46a35a2f3c9\") " pod="openstack/dnsmasq-dns-7f66b8c67-hfrbq" Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.975786 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1e1035a9-06b4-47c8-8781-e46a35a2f3c9-ovsdbserver-sb\") pod \"dnsmasq-dns-7f66b8c67-hfrbq\" (UID: \"1e1035a9-06b4-47c8-8781-e46a35a2f3c9\") " pod="openstack/dnsmasq-dns-7f66b8c67-hfrbq" Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.975815 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/11bdb484-f2ce-4363-a365-c6fa7a15d4ad-httpd-config\") pod \"neutron-56d56698b8-d88q6\" (UID: \"11bdb484-f2ce-4363-a365-c6fa7a15d4ad\") " pod="openstack/neutron-56d56698b8-d88q6" Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.976510 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1e1035a9-06b4-47c8-8781-e46a35a2f3c9-ovsdbserver-nb\") pod \"dnsmasq-dns-7f66b8c67-hfrbq\" (UID: \"1e1035a9-06b4-47c8-8781-e46a35a2f3c9\") " pod="openstack/dnsmasq-dns-7f66b8c67-hfrbq" Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.976561 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e1035a9-06b4-47c8-8781-e46a35a2f3c9-config\") pod \"dnsmasq-dns-7f66b8c67-hfrbq\" (UID: \"1e1035a9-06b4-47c8-8781-e46a35a2f3c9\") " pod="openstack/dnsmasq-dns-7f66b8c67-hfrbq" Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.976935 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1e1035a9-06b4-47c8-8781-e46a35a2f3c9-dns-svc\") pod \"dnsmasq-dns-7f66b8c67-hfrbq\" (UID: \"1e1035a9-06b4-47c8-8781-e46a35a2f3c9\") " pod="openstack/dnsmasq-dns-7f66b8c67-hfrbq" Dec 05 12:46:08 crc kubenswrapper[4784]: I1205 12:46:08.978899 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1e1035a9-06b4-47c8-8781-e46a35a2f3c9-ovsdbserver-sb\") pod \"dnsmasq-dns-7f66b8c67-hfrbq\" (UID: \"1e1035a9-06b4-47c8-8781-e46a35a2f3c9\") " pod="openstack/dnsmasq-dns-7f66b8c67-hfrbq" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:08.998507 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v69s6\" (UniqueName: \"kubernetes.io/projected/1e1035a9-06b4-47c8-8781-e46a35a2f3c9-kube-api-access-v69s6\") pod \"dnsmasq-dns-7f66b8c67-hfrbq\" (UID: \"1e1035a9-06b4-47c8-8781-e46a35a2f3c9\") " pod="openstack/dnsmasq-dns-7f66b8c67-hfrbq" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.008426 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23e368b2-6047-40b1-afd5-7899c48c94ad" 
path="/var/lib/kubelet/pods/23e368b2-6047-40b1-afd5-7899c48c94ad/volumes" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.009052 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d" path="/var/lib/kubelet/pods/bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d/volumes" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.011337 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="db3d333b-cc81-49d9-a8bf-1bdad94739c2" path="/var/lib/kubelet/pods/db3d333b-cc81-49d9-a8bf-1bdad94739c2/volumes" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.077309 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/11bdb484-f2ce-4363-a365-c6fa7a15d4ad-ovndb-tls-certs\") pod \"neutron-56d56698b8-d88q6\" (UID: \"11bdb484-f2ce-4363-a365-c6fa7a15d4ad\") " pod="openstack/neutron-56d56698b8-d88q6" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.078068 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9x6d\" (UniqueName: \"kubernetes.io/projected/11bdb484-f2ce-4363-a365-c6fa7a15d4ad-kube-api-access-q9x6d\") pod \"neutron-56d56698b8-d88q6\" (UID: \"11bdb484-f2ce-4363-a365-c6fa7a15d4ad\") " pod="openstack/neutron-56d56698b8-d88q6" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.078357 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/11bdb484-f2ce-4363-a365-c6fa7a15d4ad-httpd-config\") pod \"neutron-56d56698b8-d88q6\" (UID: \"11bdb484-f2ce-4363-a365-c6fa7a15d4ad\") " pod="openstack/neutron-56d56698b8-d88q6" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.078542 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11bdb484-f2ce-4363-a365-c6fa7a15d4ad-combined-ca-bundle\") pod \"neutron-56d56698b8-d88q6\" (UID: \"11bdb484-f2ce-4363-a365-c6fa7a15d4ad\") " pod="openstack/neutron-56d56698b8-d88q6" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.078767 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/11bdb484-f2ce-4363-a365-c6fa7a15d4ad-config\") pod \"neutron-56d56698b8-d88q6\" (UID: \"11bdb484-f2ce-4363-a365-c6fa7a15d4ad\") " pod="openstack/neutron-56d56698b8-d88q6" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.081787 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11bdb484-f2ce-4363-a365-c6fa7a15d4ad-combined-ca-bundle\") pod \"neutron-56d56698b8-d88q6\" (UID: \"11bdb484-f2ce-4363-a365-c6fa7a15d4ad\") " pod="openstack/neutron-56d56698b8-d88q6" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.082420 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/11bdb484-f2ce-4363-a365-c6fa7a15d4ad-ovndb-tls-certs\") pod \"neutron-56d56698b8-d88q6\" (UID: \"11bdb484-f2ce-4363-a365-c6fa7a15d4ad\") " pod="openstack/neutron-56d56698b8-d88q6" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.082470 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/11bdb484-f2ce-4363-a365-c6fa7a15d4ad-httpd-config\") pod \"neutron-56d56698b8-d88q6\" (UID: \"11bdb484-f2ce-4363-a365-c6fa7a15d4ad\") " 
pod="openstack/neutron-56d56698b8-d88q6" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.084236 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/11bdb484-f2ce-4363-a365-c6fa7a15d4ad-config\") pod \"neutron-56d56698b8-d88q6\" (UID: \"11bdb484-f2ce-4363-a365-c6fa7a15d4ad\") " pod="openstack/neutron-56d56698b8-d88q6" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.101294 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9x6d\" (UniqueName: \"kubernetes.io/projected/11bdb484-f2ce-4363-a365-c6fa7a15d4ad-kube-api-access-q9x6d\") pod \"neutron-56d56698b8-d88q6\" (UID: \"11bdb484-f2ce-4363-a365-c6fa7a15d4ad\") " pod="openstack/neutron-56d56698b8-d88q6" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.164512 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f66b8c67-hfrbq" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.187313 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.189694 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.191438 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.191749 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-mz2l2" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.191807 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.219145 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.228350 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-56d56698b8-d88q6" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.383005 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/528c8fca-e0a5-4a77-bde7-fc2d149416d0-logs\") pod \"glance-default-external-api-0\" (UID: \"528c8fca-e0a5-4a77-bde7-fc2d149416d0\") " pod="openstack/glance-default-external-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.383062 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/528c8fca-e0a5-4a77-bde7-fc2d149416d0-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"528c8fca-e0a5-4a77-bde7-fc2d149416d0\") " pod="openstack/glance-default-external-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.383094 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/528c8fca-e0a5-4a77-bde7-fc2d149416d0-config-data\") pod \"glance-default-external-api-0\" (UID: \"528c8fca-e0a5-4a77-bde7-fc2d149416d0\") " pod="openstack/glance-default-external-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.383110 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/528c8fca-e0a5-4a77-bde7-fc2d149416d0-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"528c8fca-e0a5-4a77-bde7-fc2d149416d0\") " pod="openstack/glance-default-external-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.383299 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/528c8fca-e0a5-4a77-bde7-fc2d149416d0-scripts\") pod \"glance-default-external-api-0\" (UID: \"528c8fca-e0a5-4a77-bde7-fc2d149416d0\") " pod="openstack/glance-default-external-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.383446 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s9m8p\" (UniqueName: \"kubernetes.io/projected/528c8fca-e0a5-4a77-bde7-fc2d149416d0-kube-api-access-s9m8p\") pod \"glance-default-external-api-0\" (UID: \"528c8fca-e0a5-4a77-bde7-fc2d149416d0\") " pod="openstack/glance-default-external-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.383599 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"528c8fca-e0a5-4a77-bde7-fc2d149416d0\") " pod="openstack/glance-default-external-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.486375 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s9m8p\" (UniqueName: \"kubernetes.io/projected/528c8fca-e0a5-4a77-bde7-fc2d149416d0-kube-api-access-s9m8p\") pod \"glance-default-external-api-0\" (UID: \"528c8fca-e0a5-4a77-bde7-fc2d149416d0\") " pod="openstack/glance-default-external-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.486450 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" 
(UID: \"528c8fca-e0a5-4a77-bde7-fc2d149416d0\") " pod="openstack/glance-default-external-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.486520 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/528c8fca-e0a5-4a77-bde7-fc2d149416d0-logs\") pod \"glance-default-external-api-0\" (UID: \"528c8fca-e0a5-4a77-bde7-fc2d149416d0\") " pod="openstack/glance-default-external-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.486540 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/528c8fca-e0a5-4a77-bde7-fc2d149416d0-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"528c8fca-e0a5-4a77-bde7-fc2d149416d0\") " pod="openstack/glance-default-external-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.486571 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/528c8fca-e0a5-4a77-bde7-fc2d149416d0-config-data\") pod \"glance-default-external-api-0\" (UID: \"528c8fca-e0a5-4a77-bde7-fc2d149416d0\") " pod="openstack/glance-default-external-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.486586 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/528c8fca-e0a5-4a77-bde7-fc2d149416d0-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"528c8fca-e0a5-4a77-bde7-fc2d149416d0\") " pod="openstack/glance-default-external-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.486618 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/528c8fca-e0a5-4a77-bde7-fc2d149416d0-scripts\") pod \"glance-default-external-api-0\" (UID: \"528c8fca-e0a5-4a77-bde7-fc2d149416d0\") " pod="openstack/glance-default-external-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.487698 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/528c8fca-e0a5-4a77-bde7-fc2d149416d0-logs\") pod \"glance-default-external-api-0\" (UID: \"528c8fca-e0a5-4a77-bde7-fc2d149416d0\") " pod="openstack/glance-default-external-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.487876 4784 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"528c8fca-e0a5-4a77-bde7-fc2d149416d0\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-external-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.488291 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/528c8fca-e0a5-4a77-bde7-fc2d149416d0-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"528c8fca-e0a5-4a77-bde7-fc2d149416d0\") " pod="openstack/glance-default-external-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.494965 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/528c8fca-e0a5-4a77-bde7-fc2d149416d0-scripts\") pod \"glance-default-external-api-0\" (UID: \"528c8fca-e0a5-4a77-bde7-fc2d149416d0\") " pod="openstack/glance-default-external-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 
12:46:09.499684 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.500932 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/528c8fca-e0a5-4a77-bde7-fc2d149416d0-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"528c8fca-e0a5-4a77-bde7-fc2d149416d0\") " pod="openstack/glance-default-external-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.501774 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.503334 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/528c8fca-e0a5-4a77-bde7-fc2d149416d0-config-data\") pod \"glance-default-external-api-0\" (UID: \"528c8fca-e0a5-4a77-bde7-fc2d149416d0\") " pod="openstack/glance-default-external-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.504306 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.506587 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.511586 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s9m8p\" (UniqueName: \"kubernetes.io/projected/528c8fca-e0a5-4a77-bde7-fc2d149416d0-kube-api-access-s9m8p\") pod \"glance-default-external-api-0\" (UID: \"528c8fca-e0a5-4a77-bde7-fc2d149416d0\") " pod="openstack/glance-default-external-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.556826 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"528c8fca-e0a5-4a77-bde7-fc2d149416d0\") " pod="openstack/glance-default-external-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.644901 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-applier-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.657123 4784 scope.go:117] "RemoveContainer" containerID="cf01e0f8a7896ba8a46a0e2e48080038f0f871a17c95e64d21692fc651fb129c" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.689927 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ac26b89e-ca78-49b7-9332-ef83dfaf4a87-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ac26b89e-ca78-49b7-9332-ef83dfaf4a87\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.689977 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac26b89e-ca78-49b7-9332-ef83dfaf4a87-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ac26b89e-ca78-49b7-9332-ef83dfaf4a87\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.690022 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ac26b89e-ca78-49b7-9332-ef83dfaf4a87-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ac26b89e-ca78-49b7-9332-ef83dfaf4a87\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.690041 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac26b89e-ca78-49b7-9332-ef83dfaf4a87-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ac26b89e-ca78-49b7-9332-ef83dfaf4a87\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.690109 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4wbp9\" (UniqueName: \"kubernetes.io/projected/ac26b89e-ca78-49b7-9332-ef83dfaf4a87-kube-api-access-4wbp9\") pod \"glance-default-internal-api-0\" (UID: \"ac26b89e-ca78-49b7-9332-ef83dfaf4a87\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.690175 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"ac26b89e-ca78-49b7-9332-ef83dfaf4a87\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.690206 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ac26b89e-ca78-49b7-9332-ef83dfaf4a87-logs\") pod \"glance-default-internal-api-0\" (UID: \"ac26b89e-ca78-49b7-9332-ef83dfaf4a87\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: E1205 12:46:09.700993 4784 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.151:5001/podified-master-centos10/openstack-cinder-api:watcher_latest" Dec 05 12:46:09 crc kubenswrapper[4784]: E1205 12:46:09.701036 4784 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" 
image="38.102.83.151:5001/podified-master-centos10/openstack-cinder-api:watcher_latest" Dec 05 12:46:09 crc kubenswrapper[4784]: E1205 12:46:09.701141 4784 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:38.102.83.151:5001/podified-master-centos10/openstack-cinder-api:watcher_latest,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-knrkq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-24k4j_openstack(e187592b-b331-4144-9a27-ba81e79121b6): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 12:46:09 crc kubenswrapper[4784]: E1205 12:46:09.702627 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-24k4j" podUID="e187592b-b331-4144-9a27-ba81e79121b6" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.793994 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b372793-eadc-4593-af62-97e7d647c76d-config-data\") pod \"1b372793-eadc-4593-af62-97e7d647c76d\" (UID: \"1b372793-eadc-4593-af62-97e7d647c76d\") " Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.794356 4784 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ppxmh\" (UniqueName: \"kubernetes.io/projected/1b372793-eadc-4593-af62-97e7d647c76d-kube-api-access-ppxmh\") pod \"1b372793-eadc-4593-af62-97e7d647c76d\" (UID: \"1b372793-eadc-4593-af62-97e7d647c76d\") " Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.794434 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1b372793-eadc-4593-af62-97e7d647c76d-logs\") pod \"1b372793-eadc-4593-af62-97e7d647c76d\" (UID: \"1b372793-eadc-4593-af62-97e7d647c76d\") " Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.794610 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b372793-eadc-4593-af62-97e7d647c76d-combined-ca-bundle\") pod \"1b372793-eadc-4593-af62-97e7d647c76d\" (UID: \"1b372793-eadc-4593-af62-97e7d647c76d\") " Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.794790 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ac26b89e-ca78-49b7-9332-ef83dfaf4a87-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ac26b89e-ca78-49b7-9332-ef83dfaf4a87\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.794826 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac26b89e-ca78-49b7-9332-ef83dfaf4a87-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ac26b89e-ca78-49b7-9332-ef83dfaf4a87\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.794862 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ac26b89e-ca78-49b7-9332-ef83dfaf4a87-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ac26b89e-ca78-49b7-9332-ef83dfaf4a87\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.794880 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac26b89e-ca78-49b7-9332-ef83dfaf4a87-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ac26b89e-ca78-49b7-9332-ef83dfaf4a87\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.794948 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4wbp9\" (UniqueName: \"kubernetes.io/projected/ac26b89e-ca78-49b7-9332-ef83dfaf4a87-kube-api-access-4wbp9\") pod \"glance-default-internal-api-0\" (UID: \"ac26b89e-ca78-49b7-9332-ef83dfaf4a87\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.795018 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"ac26b89e-ca78-49b7-9332-ef83dfaf4a87\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.795042 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ac26b89e-ca78-49b7-9332-ef83dfaf4a87-logs\") pod 
\"glance-default-internal-api-0\" (UID: \"ac26b89e-ca78-49b7-9332-ef83dfaf4a87\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.795491 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ac26b89e-ca78-49b7-9332-ef83dfaf4a87-logs\") pod \"glance-default-internal-api-0\" (UID: \"ac26b89e-ca78-49b7-9332-ef83dfaf4a87\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.795728 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ac26b89e-ca78-49b7-9332-ef83dfaf4a87-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ac26b89e-ca78-49b7-9332-ef83dfaf4a87\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.798630 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac26b89e-ca78-49b7-9332-ef83dfaf4a87-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ac26b89e-ca78-49b7-9332-ef83dfaf4a87\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.799569 4784 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"ac26b89e-ca78-49b7-9332-ef83dfaf4a87\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/glance-default-internal-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.801056 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1b372793-eadc-4593-af62-97e7d647c76d-logs" (OuterVolumeSpecName: "logs") pod "1b372793-eadc-4593-af62-97e7d647c76d" (UID: "1b372793-eadc-4593-af62-97e7d647c76d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.805467 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b372793-eadc-4593-af62-97e7d647c76d-kube-api-access-ppxmh" (OuterVolumeSpecName: "kube-api-access-ppxmh") pod "1b372793-eadc-4593-af62-97e7d647c76d" (UID: "1b372793-eadc-4593-af62-97e7d647c76d"). InnerVolumeSpecName "kube-api-access-ppxmh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.806271 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac26b89e-ca78-49b7-9332-ef83dfaf4a87-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ac26b89e-ca78-49b7-9332-ef83dfaf4a87\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.809861 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ac26b89e-ca78-49b7-9332-ef83dfaf4a87-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ac26b89e-ca78-49b7-9332-ef83dfaf4a87\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.820091 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4wbp9\" (UniqueName: \"kubernetes.io/projected/ac26b89e-ca78-49b7-9332-ef83dfaf4a87-kube-api-access-4wbp9\") pod \"glance-default-internal-api-0\" (UID: \"ac26b89e-ca78-49b7-9332-ef83dfaf4a87\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.827660 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.857345 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"ac26b89e-ca78-49b7-9332-ef83dfaf4a87\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.862952 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b372793-eadc-4593-af62-97e7d647c76d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1b372793-eadc-4593-af62-97e7d647c76d" (UID: "1b372793-eadc-4593-af62-97e7d647c76d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.880334 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b372793-eadc-4593-af62-97e7d647c76d-config-data" (OuterVolumeSpecName: "config-data") pod "1b372793-eadc-4593-af62-97e7d647c76d" (UID: "1b372793-eadc-4593-af62-97e7d647c76d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.897159 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b372793-eadc-4593-af62-97e7d647c76d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.897216 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b372793-eadc-4593-af62-97e7d647c76d-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.897227 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ppxmh\" (UniqueName: \"kubernetes.io/projected/1b372793-eadc-4593-af62-97e7d647c76d-kube-api-access-ppxmh\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.897240 4784 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1b372793-eadc-4593-af62-97e7d647c76d-logs\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.920397 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 12:46:09 crc kubenswrapper[4784]: I1205 12:46:09.952361 4784 scope.go:117] "RemoveContainer" containerID="e3e8edd616699567cb85196ebc38758d923e6b64e26511be84467d941ece2f8b" Dec 05 12:46:10 crc kubenswrapper[4784]: I1205 12:46:10.104042 4784 scope.go:117] "RemoveContainer" containerID="b7b44418f91ea98db4f90c0eb3f4c6c045d22c887bac95d723b9b1e16849661e" Dec 05 12:46:10 crc kubenswrapper[4784]: I1205 12:46:10.204498 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5884d57c44-l8tbz"] Dec 05 12:46:10 crc kubenswrapper[4784]: I1205 12:46:10.311519 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-jptmz"] Dec 05 12:46:10 crc kubenswrapper[4784]: I1205 12:46:10.602049 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-zx82c" event={"ID":"851aaea1-2d18-4f91-b410-5fdb0a7f42ec","Type":"ContainerStarted","Data":"064e84eff9398f531da02547908007f542373a3dbc6a82449493cad6c1506a07"} Dec 05 12:46:10 crc kubenswrapper[4784]: I1205 12:46:10.603015 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-api-0"] Dec 05 12:46:10 crc kubenswrapper[4784]: I1205 12:46:10.605967 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7cdb6b7d4-mvtql" event={"ID":"5eba67d7-3c83-47c9-bdc2-0946f5839efd","Type":"ContainerStarted","Data":"59308adb57e5e7055e6786e7817586d27b77f1cccb155884cabc0d3cc639d86f"} Dec 05 12:46:10 crc kubenswrapper[4784]: I1205 12:46:10.635443 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-56d56698b8-d88q6"] Dec 05 12:46:10 crc kubenswrapper[4784]: W1205 12:46:10.644059 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod11bdb484_f2ce_4363_a365_c6fa7a15d4ad.slice/crio-79d1474f6ba6c8f33548dadd2f02a33de661b0d308161ea3797435004b6c7f1a WatchSource:0}: Error finding container 79d1474f6ba6c8f33548dadd2f02a33de661b0d308161ea3797435004b6c7f1a: Status 404 returned error can't find the container with id 79d1474f6ba6c8f33548dadd2f02a33de661b0d308161ea3797435004b6c7f1a Dec 05 12:46:10 crc kubenswrapper[4784]: I1205 12:46:10.646082 4784 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/swift-storage-0" event={"ID":"f34e93a8-02d9-44ef-a18e-13ce24c3f9a6","Type":"ContainerStarted","Data":"39ad401e75ae59d32e6f671055ddc065147b6adddd5a96527a338fc69c8c84b3"} Dec 05 12:46:10 crc kubenswrapper[4784]: I1205 12:46:10.648986 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-applier-0" event={"ID":"1b372793-eadc-4593-af62-97e7d647c76d","Type":"ContainerDied","Data":"f64fbdcfff6e1d575ee3e74a37671546dd15cffdcaf6eb862b9e06ee6304fdb4"} Dec 05 12:46:10 crc kubenswrapper[4784]: I1205 12:46:10.649028 4784 scope.go:117] "RemoveContainer" containerID="cdf80954ad7cdb8479125a839315ea2440a4b028301002e1b7288c6041aa30a1" Dec 05 12:46:10 crc kubenswrapper[4784]: I1205 12:46:10.649664 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-applier-0" Dec 05 12:46:10 crc kubenswrapper[4784]: I1205 12:46:10.658649 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7f66b8c67-hfrbq"] Dec 05 12:46:10 crc kubenswrapper[4784]: I1205 12:46:10.658687 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5884d57c44-l8tbz" event={"ID":"e78e5147-155e-4027-91ca-bf7e107f5b88","Type":"ContainerStarted","Data":"7c80f143aa686ba7b4ffc83f9555b71a70da036b98f4541df45777946174d544"} Dec 05 12:46:10 crc kubenswrapper[4784]: I1205 12:46:10.660971 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-jptmz" event={"ID":"4b05cb70-952a-4d24-a3e5-cbbff5d53021","Type":"ContainerStarted","Data":"ed57b2197543792be23f3be3323dad10640cc3786ca25add77d0025fdd45c895"} Dec 05 12:46:10 crc kubenswrapper[4784]: E1205 12:46:10.662169 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.151:5001/podified-master-centos10/openstack-cinder-api:watcher_latest\\\"\"" pod="openstack/cinder-db-sync-24k4j" podUID="e187592b-b331-4144-9a27-ba81e79121b6" Dec 05 12:46:10 crc kubenswrapper[4784]: I1205 12:46:10.667216 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-zx82c" podStartSLOduration=7.500235151 podStartE2EDuration="41.667199544s" podCreationTimestamp="2025-12-05 12:45:29 +0000 UTC" firstStartedPulling="2025-12-05 12:45:33.232728083 +0000 UTC m=+1212.652794898" lastFinishedPulling="2025-12-05 12:46:07.399692476 +0000 UTC m=+1246.819759291" observedRunningTime="2025-12-05 12:46:10.621018514 +0000 UTC m=+1250.041085329" watchObservedRunningTime="2025-12-05 12:46:10.667199544 +0000 UTC m=+1250.087266359" Dec 05 12:46:10 crc kubenswrapper[4784]: I1205 12:46:10.906800 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-applier-0"] Dec 05 12:46:10 crc kubenswrapper[4784]: I1205 12:46:10.948373 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-applier-0"] Dec 05 12:46:10 crc kubenswrapper[4784]: I1205 12:46:10.980792 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-applier-0"] Dec 05 12:46:10 crc kubenswrapper[4784]: E1205 12:46:10.981235 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b372793-eadc-4593-af62-97e7d647c76d" containerName="watcher-applier" Dec 05 12:46:10 crc kubenswrapper[4784]: I1205 12:46:10.981248 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b372793-eadc-4593-af62-97e7d647c76d" containerName="watcher-applier" Dec 05 12:46:10 crc 
kubenswrapper[4784]: I1205 12:46:10.981425 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b372793-eadc-4593-af62-97e7d647c76d" containerName="watcher-applier" Dec 05 12:46:10 crc kubenswrapper[4784]: I1205 12:46:10.982055 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-applier-0" Dec 05 12:46:10 crc kubenswrapper[4784]: I1205 12:46:10.988950 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-applier-0"] Dec 05 12:46:10 crc kubenswrapper[4784]: I1205 12:46:10.993299 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-applier-config-data" Dec 05 12:46:11 crc kubenswrapper[4784]: I1205 12:46:11.051183 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1b372793-eadc-4593-af62-97e7d647c76d" path="/var/lib/kubelet/pods/1b372793-eadc-4593-af62-97e7d647c76d/volumes" Dec 05 12:46:11 crc kubenswrapper[4784]: I1205 12:46:11.051847 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-667d465cbf-r6dqc"] Dec 05 12:46:11 crc kubenswrapper[4784]: I1205 12:46:11.051889 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 12:46:11 crc kubenswrapper[4784]: I1205 12:46:11.053893 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e88e0cbd-da67-4123-97dd-6840f902d9f1-config-data\") pod \"watcher-applier-0\" (UID: \"e88e0cbd-da67-4123-97dd-6840f902d9f1\") " pod="openstack/watcher-applier-0" Dec 05 12:46:11 crc kubenswrapper[4784]: I1205 12:46:11.053989 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e88e0cbd-da67-4123-97dd-6840f902d9f1-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"e88e0cbd-da67-4123-97dd-6840f902d9f1\") " pod="openstack/watcher-applier-0" Dec 05 12:46:11 crc kubenswrapper[4784]: I1205 12:46:11.054059 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e88e0cbd-da67-4123-97dd-6840f902d9f1-logs\") pod \"watcher-applier-0\" (UID: \"e88e0cbd-da67-4123-97dd-6840f902d9f1\") " pod="openstack/watcher-applier-0" Dec 05 12:46:11 crc kubenswrapper[4784]: I1205 12:46:11.054237 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g5btg\" (UniqueName: \"kubernetes.io/projected/e88e0cbd-da67-4123-97dd-6840f902d9f1-kube-api-access-g5btg\") pod \"watcher-applier-0\" (UID: \"e88e0cbd-da67-4123-97dd-6840f902d9f1\") " pod="openstack/watcher-applier-0" Dec 05 12:46:11 crc kubenswrapper[4784]: I1205 12:46:11.153580 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 12:46:11 crc kubenswrapper[4784]: I1205 12:46:11.156142 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e88e0cbd-da67-4123-97dd-6840f902d9f1-config-data\") pod \"watcher-applier-0\" (UID: \"e88e0cbd-da67-4123-97dd-6840f902d9f1\") " pod="openstack/watcher-applier-0" Dec 05 12:46:11 crc kubenswrapper[4784]: I1205 12:46:11.156339 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/e88e0cbd-da67-4123-97dd-6840f902d9f1-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"e88e0cbd-da67-4123-97dd-6840f902d9f1\") " pod="openstack/watcher-applier-0" Dec 05 12:46:11 crc kubenswrapper[4784]: I1205 12:46:11.156432 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e88e0cbd-da67-4123-97dd-6840f902d9f1-logs\") pod \"watcher-applier-0\" (UID: \"e88e0cbd-da67-4123-97dd-6840f902d9f1\") " pod="openstack/watcher-applier-0" Dec 05 12:46:11 crc kubenswrapper[4784]: I1205 12:46:11.156570 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g5btg\" (UniqueName: \"kubernetes.io/projected/e88e0cbd-da67-4123-97dd-6840f902d9f1-kube-api-access-g5btg\") pod \"watcher-applier-0\" (UID: \"e88e0cbd-da67-4123-97dd-6840f902d9f1\") " pod="openstack/watcher-applier-0" Dec 05 12:46:11 crc kubenswrapper[4784]: I1205 12:46:11.158178 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e88e0cbd-da67-4123-97dd-6840f902d9f1-logs\") pod \"watcher-applier-0\" (UID: \"e88e0cbd-da67-4123-97dd-6840f902d9f1\") " pod="openstack/watcher-applier-0" Dec 05 12:46:11 crc kubenswrapper[4784]: I1205 12:46:11.161507 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e88e0cbd-da67-4123-97dd-6840f902d9f1-config-data\") pod \"watcher-applier-0\" (UID: \"e88e0cbd-da67-4123-97dd-6840f902d9f1\") " pod="openstack/watcher-applier-0" Dec 05 12:46:11 crc kubenswrapper[4784]: I1205 12:46:11.173536 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g5btg\" (UniqueName: \"kubernetes.io/projected/e88e0cbd-da67-4123-97dd-6840f902d9f1-kube-api-access-g5btg\") pod \"watcher-applier-0\" (UID: \"e88e0cbd-da67-4123-97dd-6840f902d9f1\") " pod="openstack/watcher-applier-0" Dec 05 12:46:11 crc kubenswrapper[4784]: I1205 12:46:11.192131 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e88e0cbd-da67-4123-97dd-6840f902d9f1-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"e88e0cbd-da67-4123-97dd-6840f902d9f1\") " pod="openstack/watcher-applier-0" Dec 05 12:46:11 crc kubenswrapper[4784]: I1205 12:46:11.386500 4784 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5dd44fd6df-gsqs9" podUID="bb48c645-1ac8-450e-9cc4-c8ff59d3fb0d" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.120:5353: i/o timeout" Dec 05 12:46:11 crc kubenswrapper[4784]: I1205 12:46:11.543665 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-applier-0" Dec 05 12:46:11 crc kubenswrapper[4784]: I1205 12:46:11.700586 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ee259e74-24f6-4a39-b3d9-3bd926ace782","Type":"ContainerStarted","Data":"8abd08789218c1d119999de5e917fb05499dc4cd916135ba40d89c9a7ff777ab"} Dec 05 12:46:11 crc kubenswrapper[4784]: I1205 12:46:11.711342 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7cdb6b7d4-mvtql" event={"ID":"5eba67d7-3c83-47c9-bdc2-0946f5839efd","Type":"ContainerStarted","Data":"307b3d3693f5d67a1877b325db3f8c8dd731e6c63b883f0f3ed859bfc39d859f"} Dec 05 12:46:11 crc kubenswrapper[4784]: I1205 12:46:11.714630 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"3964e71e-ad0d-4f97-9458-bb1defbd3a47","Type":"ContainerStarted","Data":"bda0177b249528eacb150937efea2f45580d5c2e3a8fd5f6a65977059b8ad092"} Dec 05 12:46:11 crc kubenswrapper[4784]: I1205 12:46:11.729148 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"528c8fca-e0a5-4a77-bde7-fc2d149416d0","Type":"ContainerStarted","Data":"8063c9468605028eab51a8a58c527881f9a9daa03504f5da0aa2c993235b88cb"} Dec 05 12:46:11 crc kubenswrapper[4784]: I1205 12:46:11.746988 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"31a07479-cab0-4561-b49b-73b1c3dad744","Type":"ContainerStarted","Data":"9ae2fd9a85d67e8d89687efbdea1588a1417a78ed52f566976c127a094db0480"} Dec 05 12:46:11 crc kubenswrapper[4784]: I1205 12:46:11.773676 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ac26b89e-ca78-49b7-9332-ef83dfaf4a87","Type":"ContainerStarted","Data":"f57cad56c62db7e8788d623751fd929c4d872b72d6fdec0e64ed563c93a04494"} Dec 05 12:46:11 crc kubenswrapper[4784]: I1205 12:46:11.780572 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f66b8c67-hfrbq" event={"ID":"1e1035a9-06b4-47c8-8781-e46a35a2f3c9","Type":"ContainerStarted","Data":"82057a35d9107d78e15475ddb5111920999bdf98b8bf5c8f2e2612b2fecefff6"} Dec 05 12:46:11 crc kubenswrapper[4784]: I1205 12:46:11.784932 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-667d465cbf-r6dqc" event={"ID":"7fad5aee-990f-4c1b-8610-db4cb774dcf7","Type":"ContainerStarted","Data":"2bd86b235223ca475eb576c874b97dea7d7b2002515953ca1df1c99b34678063"} Dec 05 12:46:11 crc kubenswrapper[4784]: I1205 12:46:11.790766 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5884d57c44-l8tbz" event={"ID":"e78e5147-155e-4027-91ca-bf7e107f5b88","Type":"ContainerStarted","Data":"427abff3eac7473e838fa85f2097041270a6b80a6e6f065dfda211890bb77d19"} Dec 05 12:46:11 crc kubenswrapper[4784]: I1205 12:46:11.793967 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-jptmz" event={"ID":"4b05cb70-952a-4d24-a3e5-cbbff5d53021","Type":"ContainerStarted","Data":"ed101b41af71c06e8fc74105f382f320686edc591a1254573e361ce8b50553b4"} Dec 05 12:46:11 crc kubenswrapper[4784]: I1205 12:46:11.807211 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-56d56698b8-d88q6" event={"ID":"11bdb484-f2ce-4363-a365-c6fa7a15d4ad","Type":"ContainerStarted","Data":"79d1474f6ba6c8f33548dadd2f02a33de661b0d308161ea3797435004b6c7f1a"} Dec 05 12:46:11 crc kubenswrapper[4784]: I1205 12:46:11.818892 4784 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-decision-engine-0" podStartSLOduration=6.023625367 podStartE2EDuration="42.818868932s" podCreationTimestamp="2025-12-05 12:45:29 +0000 UTC" firstStartedPulling="2025-12-05 12:45:33.244312515 +0000 UTC m=+1212.664379340" lastFinishedPulling="2025-12-05 12:46:10.03955609 +0000 UTC m=+1249.459622905" observedRunningTime="2025-12-05 12:46:11.77424841 +0000 UTC m=+1251.194315225" watchObservedRunningTime="2025-12-05 12:46:11.818868932 +0000 UTC m=+1251.238935747"
Dec 05 12:46:11 crc kubenswrapper[4784]: I1205 12:46:11.820210 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-jptmz" podStartSLOduration=29.820205194 podStartE2EDuration="29.820205194s" podCreationTimestamp="2025-12-05 12:45:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:46:11.810581794 +0000 UTC m=+1251.230648619" watchObservedRunningTime="2025-12-05 12:46:11.820205194 +0000 UTC m=+1251.240272009"
Dec 05 12:46:12 crc kubenswrapper[4784]: I1205 12:46:12.546177 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 05 12:46:12 crc kubenswrapper[4784]: I1205 12:46:12.649536 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 05 12:46:12 crc kubenswrapper[4784]: I1205 12:46:12.870348 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"3964e71e-ad0d-4f97-9458-bb1defbd3a47","Type":"ContainerStarted","Data":"2de19768eaa35e73bb81e6523b0c64a9bf3209f080fef2729cad20d498ea00d0"}
Dec 05 12:46:12 crc kubenswrapper[4784]: I1205 12:46:12.911674 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-56d56698b8-d88q6" event={"ID":"11bdb484-f2ce-4363-a365-c6fa7a15d4ad","Type":"ContainerStarted","Data":"96467bc389028b09dc49f48cd3bfca18326fc12165a5b3523bbaff001ad5c886"}
Dec 05 12:46:12 crc kubenswrapper[4784]: I1205 12:46:12.914004 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"528c8fca-e0a5-4a77-bde7-fc2d149416d0","Type":"ContainerStarted","Data":"5b8692acbe09d6fb2de78c1e6356949da4627e9def7da5ae22271c717d381793"}
Dec 05 12:46:12 crc kubenswrapper[4784]: I1205 12:46:12.918233 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7cdb6b7d4-mvtql" event={"ID":"5eba67d7-3c83-47c9-bdc2-0946f5839efd","Type":"ContainerStarted","Data":"f14773b08f646dc4975813e3318664f4b747f24f8c1219af1262a511483df12d"}
Dec 05 12:46:12 crc kubenswrapper[4784]: I1205 12:46:12.934396 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-applier-0"]
Dec 05 12:46:12 crc kubenswrapper[4784]: I1205 12:46:12.934692 4784 generic.go:334] "Generic (PLEG): container finished" podID="1e1035a9-06b4-47c8-8781-e46a35a2f3c9" containerID="f4f454c86824b1d011ca6fd6e190a8b67a6fdb135edfabeb882bf166142b9a39" exitCode=0
Dec 05 12:46:12 crc kubenswrapper[4784]: I1205 12:46:12.934983 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f66b8c67-hfrbq" event={"ID":"1e1035a9-06b4-47c8-8781-e46a35a2f3c9","Type":"ContainerDied","Data":"f4f454c86824b1d011ca6fd6e190a8b67a6fdb135edfabeb882bf166142b9a39"}
Dec 05 12:46:12 crc kubenswrapper[4784]: I1205 12:46:12.952307 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-7cdb6b7d4-mvtql" podStartSLOduration=34.304423104 podStartE2EDuration="34.952291571s" podCreationTimestamp="2025-12-05 12:45:38 +0000 UTC" firstStartedPulling="2025-12-05 12:46:09.727485956 +0000 UTC m=+1249.147552761" lastFinishedPulling="2025-12-05 12:46:10.375354413 +0000 UTC m=+1249.795421228" observedRunningTime="2025-12-05 12:46:12.94868038 +0000 UTC m=+1252.368747195" watchObservedRunningTime="2025-12-05 12:46:12.952291571 +0000 UTC m=+1252.372358386"
Dec 05 12:46:12 crc kubenswrapper[4784]: I1205 12:46:12.967139 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f34e93a8-02d9-44ef-a18e-13ce24c3f9a6","Type":"ContainerStarted","Data":"22f35536f805fbc6ff19e68f6ff281372ab7993c8a563b65899cbdd7373af5c0"}
Dec 05 12:46:12 crc kubenswrapper[4784]: I1205 12:46:12.983002 4784 generic.go:334] "Generic (PLEG): container finished" podID="7fad5aee-990f-4c1b-8610-db4cb774dcf7" containerID="ba9b5bc98e2c204d52df78b2061f714a1eca06cf77c16f7c19171daa58182578" exitCode=0
Dec 05 12:46:12 crc kubenswrapper[4784]: I1205 12:46:12.983811 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-667d465cbf-r6dqc" event={"ID":"7fad5aee-990f-4c1b-8610-db4cb774dcf7","Type":"ContainerDied","Data":"ba9b5bc98e2c204d52df78b2061f714a1eca06cf77c16f7c19171daa58182578"}
Dec 05 12:46:13 crc kubenswrapper[4784]: I1205 12:46:13.900288 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-667d465cbf-r6dqc"
Dec 05 12:46:13 crc kubenswrapper[4784]: I1205 12:46:13.995328 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"3964e71e-ad0d-4f97-9458-bb1defbd3a47","Type":"ContainerStarted","Data":"a2d57e911d363f308109c061480931a780e722a62a5199faa807f05c0602cd68"}
Dec 05 12:46:13 crc kubenswrapper[4784]: I1205 12:46:13.995742 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0"
Dec 05 12:46:13 crc kubenswrapper[4784]: I1205 12:46:13.998856 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-667d465cbf-r6dqc" event={"ID":"7fad5aee-990f-4c1b-8610-db4cb774dcf7","Type":"ContainerDied","Data":"2bd86b235223ca475eb576c874b97dea7d7b2002515953ca1df1c99b34678063"}
Dec 05 12:46:13 crc kubenswrapper[4784]: I1205 12:46:13.998917 4784 scope.go:117] "RemoveContainer" containerID="ba9b5bc98e2c204d52df78b2061f714a1eca06cf77c16f7c19171daa58182578"
Dec 05 12:46:13 crc kubenswrapper[4784]: I1205 12:46:13.999035 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-667d465cbf-r6dqc"
Dec 05 12:46:14 crc kubenswrapper[4784]: I1205 12:46:14.001651 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5884d57c44-l8tbz" event={"ID":"e78e5147-155e-4027-91ca-bf7e107f5b88","Type":"ContainerStarted","Data":"773546a469ba89b933adcb75ad82aff9b851eb2d42eb822c7c78e5fe1ae30e3e"}
Dec 05 12:46:14 crc kubenswrapper[4784]: I1205 12:46:14.004948 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-56d56698b8-d88q6" event={"ID":"11bdb484-f2ce-4363-a365-c6fa7a15d4ad","Type":"ContainerStarted","Data":"4a5c7f44debaf13680fb6230e39d2ed7ba59cfd6b23fc1026cbf066d344a6003"}
Dec 05 12:46:14 crc kubenswrapper[4784]: I1205 12:46:14.005344 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-56d56698b8-d88q6"
Dec 05 12:46:14 crc kubenswrapper[4784]: I1205 12:46:14.006157 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-applier-0" event={"ID":"e88e0cbd-da67-4123-97dd-6840f902d9f1","Type":"ContainerStarted","Data":"1f75cdec0bf31bc774c3ca56812a6247e34f676dbd1bbcf18706b8a88202e0c2"}
Dec 05 12:46:14 crc kubenswrapper[4784]: I1205 12:46:14.026058 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-api-0" podStartSLOduration=23.026040661 podStartE2EDuration="23.026040661s" podCreationTimestamp="2025-12-05 12:45:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:46:14.019640691 +0000 UTC m=+1253.439707506" watchObservedRunningTime="2025-12-05 12:46:14.026040661 +0000 UTC m=+1253.446107476"
Dec 05 12:46:14 crc kubenswrapper[4784]: I1205 12:46:14.030428 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f34e93a8-02d9-44ef-a18e-13ce24c3f9a6","Type":"ContainerStarted","Data":"94f04505553ee948f680f22ff892e17d462fad434fba9cc85702eaca4f2aa4ba"}
Dec 05 12:46:14 crc kubenswrapper[4784]: I1205 12:46:14.032300 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7fad5aee-990f-4c1b-8610-db4cb774dcf7-config\") pod \"7fad5aee-990f-4c1b-8610-db4cb774dcf7\" (UID: \"7fad5aee-990f-4c1b-8610-db4cb774dcf7\") "
Dec 05 12:46:14 crc kubenswrapper[4784]: I1205 12:46:14.032368 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7fad5aee-990f-4c1b-8610-db4cb774dcf7-ovsdbserver-nb\") pod \"7fad5aee-990f-4c1b-8610-db4cb774dcf7\" (UID: \"7fad5aee-990f-4c1b-8610-db4cb774dcf7\") "
Dec 05 12:46:14 crc kubenswrapper[4784]: I1205 12:46:14.032503 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7fad5aee-990f-4c1b-8610-db4cb774dcf7-ovsdbserver-sb\") pod \"7fad5aee-990f-4c1b-8610-db4cb774dcf7\" (UID: \"7fad5aee-990f-4c1b-8610-db4cb774dcf7\") "
Dec 05 12:46:14 crc kubenswrapper[4784]: I1205 12:46:14.032582 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8lxp6\" (UniqueName: \"kubernetes.io/projected/7fad5aee-990f-4c1b-8610-db4cb774dcf7-kube-api-access-8lxp6\") pod \"7fad5aee-990f-4c1b-8610-db4cb774dcf7\" (UID: \"7fad5aee-990f-4c1b-8610-db4cb774dcf7\") "
Dec 05 12:46:14 crc kubenswrapper[4784]: I1205 12:46:14.032678 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7fad5aee-990f-4c1b-8610-db4cb774dcf7-dns-svc\") pod \"7fad5aee-990f-4c1b-8610-db4cb774dcf7\" (UID: \"7fad5aee-990f-4c1b-8610-db4cb774dcf7\") "
Dec 05 12:46:14 crc kubenswrapper[4784]: I1205 12:46:14.044376 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7fad5aee-990f-4c1b-8610-db4cb774dcf7-kube-api-access-8lxp6" (OuterVolumeSpecName: "kube-api-access-8lxp6") pod "7fad5aee-990f-4c1b-8610-db4cb774dcf7" (UID: "7fad5aee-990f-4c1b-8610-db4cb774dcf7"). InnerVolumeSpecName "kube-api-access-8lxp6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:46:14 crc kubenswrapper[4784]: I1205 12:46:14.070937 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7fad5aee-990f-4c1b-8610-db4cb774dcf7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7fad5aee-990f-4c1b-8610-db4cb774dcf7" (UID: "7fad5aee-990f-4c1b-8610-db4cb774dcf7"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:46:14 crc kubenswrapper[4784]: I1205 12:46:14.071710 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7fad5aee-990f-4c1b-8610-db4cb774dcf7-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "7fad5aee-990f-4c1b-8610-db4cb774dcf7" (UID: "7fad5aee-990f-4c1b-8610-db4cb774dcf7"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:46:14 crc kubenswrapper[4784]: I1205 12:46:14.080389 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-56d56698b8-d88q6" podStartSLOduration=6.080369384 podStartE2EDuration="6.080369384s" podCreationTimestamp="2025-12-05 12:46:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:46:14.050562295 +0000 UTC m=+1253.470629110" watchObservedRunningTime="2025-12-05 12:46:14.080369384 +0000 UTC m=+1253.500436189"
Dec 05 12:46:14 crc kubenswrapper[4784]: I1205 12:46:14.081979 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-5884d57c44-l8tbz" podStartSLOduration=36.081970815 podStartE2EDuration="36.081970815s" podCreationTimestamp="2025-12-05 12:45:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:46:14.07122439 +0000 UTC m=+1253.491291205" watchObservedRunningTime="2025-12-05 12:46:14.081970815 +0000 UTC m=+1253.502037630"
Dec 05 12:46:14 crc kubenswrapper[4784]: I1205 12:46:14.087086 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7fad5aee-990f-4c1b-8610-db4cb774dcf7-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "7fad5aee-990f-4c1b-8610-db4cb774dcf7" (UID: "7fad5aee-990f-4c1b-8610-db4cb774dcf7"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:46:14 crc kubenswrapper[4784]: I1205 12:46:14.107922 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7fad5aee-990f-4c1b-8610-db4cb774dcf7-config" (OuterVolumeSpecName: "config") pod "7fad5aee-990f-4c1b-8610-db4cb774dcf7" (UID: "7fad5aee-990f-4c1b-8610-db4cb774dcf7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 12:46:14 crc kubenswrapper[4784]: I1205 12:46:14.135152 4784 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7fad5aee-990f-4c1b-8610-db4cb774dcf7-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 05 12:46:14 crc kubenswrapper[4784]: I1205 12:46:14.135414 4784 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7fad5aee-990f-4c1b-8610-db4cb774dcf7-config\") on node \"crc\" DevicePath \"\""
Dec 05 12:46:14 crc kubenswrapper[4784]: I1205 12:46:14.135424 4784 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7fad5aee-990f-4c1b-8610-db4cb774dcf7-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 05 12:46:14 crc kubenswrapper[4784]: I1205 12:46:14.135435 4784 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7fad5aee-990f-4c1b-8610-db4cb774dcf7-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 05 12:46:14 crc kubenswrapper[4784]: I1205 12:46:14.135446 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8lxp6\" (UniqueName: \"kubernetes.io/projected/7fad5aee-990f-4c1b-8610-db4cb774dcf7-kube-api-access-8lxp6\") on node \"crc\" DevicePath \"\""
Dec 05 12:46:14 crc kubenswrapper[4784]: I1205 12:46:14.371570 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-667d465cbf-r6dqc"]
Dec 05 12:46:14 crc kubenswrapper[4784]: I1205 12:46:14.382312 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-667d465cbf-r6dqc"]
Dec 05 12:46:15 crc kubenswrapper[4784]: I1205 12:46:15.014817 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7fad5aee-990f-4c1b-8610-db4cb774dcf7" path="/var/lib/kubelet/pods/7fad5aee-990f-4c1b-8610-db4cb774dcf7/volumes"
Dec 05 12:46:15 crc kubenswrapper[4784]: I1205 12:46:15.041985 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ee259e74-24f6-4a39-b3d9-3bd926ace782","Type":"ContainerStarted","Data":"a82f9006a602c19583e2d55d81c80a786b050b17e4b7ac411347b1ecefe31e74"}
Dec 05 12:46:15 crc kubenswrapper[4784]: I1205 12:46:15.661090 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-6bc87b8895-m5b7r"]
Dec 05 12:46:15 crc kubenswrapper[4784]: E1205 12:46:15.661820 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7fad5aee-990f-4c1b-8610-db4cb774dcf7" containerName="init"
Dec 05 12:46:15 crc kubenswrapper[4784]: I1205 12:46:15.661833 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="7fad5aee-990f-4c1b-8610-db4cb774dcf7" containerName="init"
Dec 05 12:46:15 crc kubenswrapper[4784]: I1205 12:46:15.662036 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="7fad5aee-990f-4c1b-8610-db4cb774dcf7" containerName="init"
Dec 05 12:46:15 crc kubenswrapper[4784]: I1205 12:46:15.663072 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6bc87b8895-m5b7r"
Dec 05 12:46:15 crc kubenswrapper[4784]: I1205 12:46:15.667568 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc"
Dec 05 12:46:15 crc kubenswrapper[4784]: I1205 12:46:15.667788 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc"
Dec 05 12:46:15 crc kubenswrapper[4784]: I1205 12:46:15.681789 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6bc87b8895-m5b7r"]
Dec 05 12:46:15 crc kubenswrapper[4784]: I1205 12:46:15.766026 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4c57012-5781-4940-9551-6a53e2f9fad3-internal-tls-certs\") pod \"neutron-6bc87b8895-m5b7r\" (UID: \"c4c57012-5781-4940-9551-6a53e2f9fad3\") " pod="openstack/neutron-6bc87b8895-m5b7r"
Dec 05 12:46:15 crc kubenswrapper[4784]: I1205 12:46:15.766066 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4c57012-5781-4940-9551-6a53e2f9fad3-public-tls-certs\") pod \"neutron-6bc87b8895-m5b7r\" (UID: \"c4c57012-5781-4940-9551-6a53e2f9fad3\") " pod="openstack/neutron-6bc87b8895-m5b7r"
Dec 05 12:46:15 crc kubenswrapper[4784]: I1205 12:46:15.766126 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c4c57012-5781-4940-9551-6a53e2f9fad3-config\") pod \"neutron-6bc87b8895-m5b7r\" (UID: \"c4c57012-5781-4940-9551-6a53e2f9fad3\") " pod="openstack/neutron-6bc87b8895-m5b7r"
Dec 05 12:46:15 crc kubenswrapper[4784]: I1205 12:46:15.766145 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4c57012-5781-4940-9551-6a53e2f9fad3-combined-ca-bundle\") pod \"neutron-6bc87b8895-m5b7r\" (UID: \"c4c57012-5781-4940-9551-6a53e2f9fad3\") " pod="openstack/neutron-6bc87b8895-m5b7r"
Dec 05 12:46:15 crc kubenswrapper[4784]: I1205 12:46:15.766216 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hrccq\" (UniqueName: \"kubernetes.io/projected/c4c57012-5781-4940-9551-6a53e2f9fad3-kube-api-access-hrccq\") pod \"neutron-6bc87b8895-m5b7r\" (UID: \"c4c57012-5781-4940-9551-6a53e2f9fad3\") " pod="openstack/neutron-6bc87b8895-m5b7r"
Dec 05 12:46:15 crc kubenswrapper[4784]: I1205 12:46:15.766245 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4c57012-5781-4940-9551-6a53e2f9fad3-ovndb-tls-certs\") pod \"neutron-6bc87b8895-m5b7r\" (UID: \"c4c57012-5781-4940-9551-6a53e2f9fad3\") " pod="openstack/neutron-6bc87b8895-m5b7r"
Dec 05 12:46:15 crc kubenswrapper[4784]: I1205 12:46:15.766293 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c4c57012-5781-4940-9551-6a53e2f9fad3-httpd-config\") pod \"neutron-6bc87b8895-m5b7r\" (UID: \"c4c57012-5781-4940-9551-6a53e2f9fad3\") " pod="openstack/neutron-6bc87b8895-m5b7r"
Dec 05 12:46:15 crc kubenswrapper[4784]: I1205 12:46:15.867297 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c4c57012-5781-4940-9551-6a53e2f9fad3-config\") pod \"neutron-6bc87b8895-m5b7r\" (UID: \"c4c57012-5781-4940-9551-6a53e2f9fad3\") " pod="openstack/neutron-6bc87b8895-m5b7r"
Dec 05 12:46:15 crc kubenswrapper[4784]: I1205 12:46:15.867347 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4c57012-5781-4940-9551-6a53e2f9fad3-combined-ca-bundle\") pod \"neutron-6bc87b8895-m5b7r\" (UID: \"c4c57012-5781-4940-9551-6a53e2f9fad3\") " pod="openstack/neutron-6bc87b8895-m5b7r"
Dec 05 12:46:15 crc kubenswrapper[4784]: I1205 12:46:15.867403 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hrccq\" (UniqueName: \"kubernetes.io/projected/c4c57012-5781-4940-9551-6a53e2f9fad3-kube-api-access-hrccq\") pod \"neutron-6bc87b8895-m5b7r\" (UID: \"c4c57012-5781-4940-9551-6a53e2f9fad3\") " pod="openstack/neutron-6bc87b8895-m5b7r"
Dec 05 12:46:15 crc kubenswrapper[4784]: I1205 12:46:15.867431 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4c57012-5781-4940-9551-6a53e2f9fad3-ovndb-tls-certs\") pod \"neutron-6bc87b8895-m5b7r\" (UID: \"c4c57012-5781-4940-9551-6a53e2f9fad3\") " pod="openstack/neutron-6bc87b8895-m5b7r"
Dec 05 12:46:15 crc kubenswrapper[4784]: I1205 12:46:15.867475 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c4c57012-5781-4940-9551-6a53e2f9fad3-httpd-config\") pod \"neutron-6bc87b8895-m5b7r\" (UID: \"c4c57012-5781-4940-9551-6a53e2f9fad3\") " pod="openstack/neutron-6bc87b8895-m5b7r"
Dec 05 12:46:15 crc kubenswrapper[4784]: I1205 12:46:15.867513 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4c57012-5781-4940-9551-6a53e2f9fad3-internal-tls-certs\") pod \"neutron-6bc87b8895-m5b7r\" (UID: \"c4c57012-5781-4940-9551-6a53e2f9fad3\") " pod="openstack/neutron-6bc87b8895-m5b7r"
Dec 05 12:46:15 crc kubenswrapper[4784]: I1205 12:46:15.867529 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4c57012-5781-4940-9551-6a53e2f9fad3-public-tls-certs\") pod \"neutron-6bc87b8895-m5b7r\" (UID: \"c4c57012-5781-4940-9551-6a53e2f9fad3\") " pod="openstack/neutron-6bc87b8895-m5b7r"
Dec 05 12:46:15 crc kubenswrapper[4784]: I1205 12:46:15.880137 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/c4c57012-5781-4940-9551-6a53e2f9fad3-config\") pod \"neutron-6bc87b8895-m5b7r\" (UID: \"c4c57012-5781-4940-9551-6a53e2f9fad3\") " pod="openstack/neutron-6bc87b8895-m5b7r"
Dec 05 12:46:15 crc kubenswrapper[4784]: I1205 12:46:15.880241 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4c57012-5781-4940-9551-6a53e2f9fad3-ovndb-tls-certs\") pod \"neutron-6bc87b8895-m5b7r\" (UID: \"c4c57012-5781-4940-9551-6a53e2f9fad3\") " pod="openstack/neutron-6bc87b8895-m5b7r"
Dec 05 12:46:15 crc kubenswrapper[4784]: I1205 12:46:15.881078 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4c57012-5781-4940-9551-6a53e2f9fad3-internal-tls-certs\") pod \"neutron-6bc87b8895-m5b7r\" (UID: \"c4c57012-5781-4940-9551-6a53e2f9fad3\") " pod="openstack/neutron-6bc87b8895-m5b7r"
Dec 05 12:46:15 crc kubenswrapper[4784]: I1205 12:46:15.881078 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4c57012-5781-4940-9551-6a53e2f9fad3-public-tls-certs\") pod \"neutron-6bc87b8895-m5b7r\" (UID: \"c4c57012-5781-4940-9551-6a53e2f9fad3\") " pod="openstack/neutron-6bc87b8895-m5b7r"
Dec 05 12:46:15 crc kubenswrapper[4784]: I1205 12:46:15.881542 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4c57012-5781-4940-9551-6a53e2f9fad3-combined-ca-bundle\") pod \"neutron-6bc87b8895-m5b7r\" (UID: \"c4c57012-5781-4940-9551-6a53e2f9fad3\") " pod="openstack/neutron-6bc87b8895-m5b7r"
Dec 05 12:46:15 crc kubenswrapper[4784]: I1205 12:46:15.881708 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c4c57012-5781-4940-9551-6a53e2f9fad3-httpd-config\") pod \"neutron-6bc87b8895-m5b7r\" (UID: \"c4c57012-5781-4940-9551-6a53e2f9fad3\") " pod="openstack/neutron-6bc87b8895-m5b7r"
Dec 05 12:46:15 crc kubenswrapper[4784]: I1205 12:46:15.900169 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hrccq\" (UniqueName: \"kubernetes.io/projected/c4c57012-5781-4940-9551-6a53e2f9fad3-kube-api-access-hrccq\") pod \"neutron-6bc87b8895-m5b7r\" (UID: \"c4c57012-5781-4940-9551-6a53e2f9fad3\") " pod="openstack/neutron-6bc87b8895-m5b7r"
Dec 05 12:46:15 crc kubenswrapper[4784]: I1205 12:46:15.987102 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6bc87b8895-m5b7r"
Dec 05 12:46:16 crc kubenswrapper[4784]: I1205 12:46:16.055465 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-applier-0" event={"ID":"e88e0cbd-da67-4123-97dd-6840f902d9f1","Type":"ContainerStarted","Data":"8d0899b1c40e69e777826436db281e5f979443ca07a0ce0f8634ed1f3b244da7"}
Dec 05 12:46:16 crc kubenswrapper[4784]: I1205 12:46:16.062332 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ac26b89e-ca78-49b7-9332-ef83dfaf4a87","Type":"ContainerStarted","Data":"939b90befea8539870cbe36b6f5192fd9a0cac640ba20efb942874be42be9891"}
Dec 05 12:46:16 crc kubenswrapper[4784]: I1205 12:46:16.077761 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-applier-0" podStartSLOduration=6.07773914 podStartE2EDuration="6.07773914s" podCreationTimestamp="2025-12-05 12:46:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:46:16.07167035 +0000 UTC m=+1255.491737165" watchObservedRunningTime="2025-12-05 12:46:16.07773914 +0000 UTC m=+1255.497805955"
Dec 05 12:46:16 crc kubenswrapper[4784]: I1205 12:46:16.078619 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f66b8c67-hfrbq" event={"ID":"1e1035a9-06b4-47c8-8781-e46a35a2f3c9","Type":"ContainerStarted","Data":"c98a4221776a428967d5ebf486d3f7cd495138092bcfa4577cf485f39075d63e"}
Dec 05 12:46:16 crc kubenswrapper[4784]: I1205 12:46:16.078978 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7f66b8c67-hfrbq"
Dec 05 12:46:16 crc kubenswrapper[4784]: I1205 12:46:16.080828 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"528c8fca-e0a5-4a77-bde7-fc2d149416d0","Type":"ContainerStarted","Data":"2964ad9cf932774e3e8627abaa8728709c35d0281c58aa26cc85c3e7c804e4e8"}
Dec 05 12:46:16 crc kubenswrapper[4784]: I1205 12:46:16.080944 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="528c8fca-e0a5-4a77-bde7-fc2d149416d0" containerName="glance-log" containerID="cri-o://5b8692acbe09d6fb2de78c1e6356949da4627e9def7da5ae22271c717d381793" gracePeriod=30
Dec 05 12:46:16 crc kubenswrapper[4784]: I1205 12:46:16.081226 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="528c8fca-e0a5-4a77-bde7-fc2d149416d0" containerName="glance-httpd" containerID="cri-o://2964ad9cf932774e3e8627abaa8728709c35d0281c58aa26cc85c3e7c804e4e8" gracePeriod=30
Dec 05 12:46:16 crc kubenswrapper[4784]: I1205 12:46:16.140972 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7f66b8c67-hfrbq" podStartSLOduration=8.140948001 podStartE2EDuration="8.140948001s" podCreationTimestamp="2025-12-05 12:46:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:46:16.118774919 +0000 UTC m=+1255.538841734" watchObservedRunningTime="2025-12-05 12:46:16.140948001 +0000 UTC m=+1255.561014816"
Dec 05 12:46:16 crc kubenswrapper[4784]: I1205 12:46:16.184136 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=8.184099937 podStartE2EDuration="8.184099937s" podCreationTimestamp="2025-12-05 12:46:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:46:16.181495806 +0000 UTC m=+1255.601562631" watchObservedRunningTime="2025-12-05 12:46:16.184099937 +0000 UTC m=+1255.604166752"
Dec 05 12:46:16 crc kubenswrapper[4784]: I1205 12:46:16.544699 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-applier-0"
Dec 05 12:46:16 crc kubenswrapper[4784]: I1205 12:46:16.748367 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6bc87b8895-m5b7r"]
Dec 05 12:46:16 crc kubenswrapper[4784]: W1205 12:46:16.795481 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc4c57012_5781_4940_9551_6a53e2f9fad3.slice/crio-82eeadeeba8e6f2b46f979f9bd0d0c58148ddabcf7d296327d2f57943ff7ddd7 WatchSource:0}: Error finding container 82eeadeeba8e6f2b46f979f9bd0d0c58148ddabcf7d296327d2f57943ff7ddd7: Status 404 returned error can't find the container with id 82eeadeeba8e6f2b46f979f9bd0d0c58148ddabcf7d296327d2f57943ff7ddd7
Dec 05 12:46:16 crc kubenswrapper[4784]: I1205 12:46:16.816094 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0"
Dec 05 12:46:16 crc kubenswrapper[4784]: I1205 12:46:16.816217 4784 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 05 12:46:17 crc kubenswrapper[4784]: I1205 12:46:17.104514 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ac26b89e-ca78-49b7-9332-ef83dfaf4a87","Type":"ContainerStarted","Data":"77e2a7c42808e9e0e4c00d4af24858424deab4379fd3e42a566f5ef44e9abf18"}
Dec 05 12:46:17 crc kubenswrapper[4784]: I1205 12:46:17.104840 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="ac26b89e-ca78-49b7-9332-ef83dfaf4a87" containerName="glance-log" containerID="cri-o://939b90befea8539870cbe36b6f5192fd9a0cac640ba20efb942874be42be9891" gracePeriod=30
Dec 05 12:46:17 crc kubenswrapper[4784]: I1205 12:46:17.105312 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="ac26b89e-ca78-49b7-9332-ef83dfaf4a87" containerName="glance-httpd" containerID="cri-o://77e2a7c42808e9e0e4c00d4af24858424deab4379fd3e42a566f5ef44e9abf18" gracePeriod=30
Dec 05 12:46:17 crc kubenswrapper[4784]: I1205 12:46:17.115896 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6bc87b8895-m5b7r" event={"ID":"c4c57012-5781-4940-9551-6a53e2f9fad3","Type":"ContainerStarted","Data":"82eeadeeba8e6f2b46f979f9bd0d0c58148ddabcf7d296327d2f57943ff7ddd7"}
Dec 05 12:46:17 crc kubenswrapper[4784]: I1205 12:46:17.139042 4784 generic.go:334] "Generic (PLEG): container finished" podID="528c8fca-e0a5-4a77-bde7-fc2d149416d0" containerID="2964ad9cf932774e3e8627abaa8728709c35d0281c58aa26cc85c3e7c804e4e8" exitCode=0
Dec 05 12:46:17 crc kubenswrapper[4784]: I1205 12:46:17.139094 4784 generic.go:334] "Generic (PLEG): container finished" podID="528c8fca-e0a5-4a77-bde7-fc2d149416d0" containerID="5b8692acbe09d6fb2de78c1e6356949da4627e9def7da5ae22271c717d381793" exitCode=143
Dec 05 12:46:17 crc kubenswrapper[4784]: I1205 12:46:17.139279 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"528c8fca-e0a5-4a77-bde7-fc2d149416d0","Type":"ContainerDied","Data":"2964ad9cf932774e3e8627abaa8728709c35d0281c58aa26cc85c3e7c804e4e8"}
Dec 05 12:46:17 crc kubenswrapper[4784]: I1205 12:46:17.139364 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"528c8fca-e0a5-4a77-bde7-fc2d149416d0","Type":"ContainerDied","Data":"5b8692acbe09d6fb2de78c1e6356949da4627e9def7da5ae22271c717d381793"}
Dec 05 12:46:17 crc kubenswrapper[4784]: I1205 12:46:17.146144 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=9.146121421 podStartE2EDuration="9.146121421s" podCreationTimestamp="2025-12-05 12:46:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:46:17.12654951 +0000 UTC m=+1256.546616325" watchObservedRunningTime="2025-12-05 12:46:17.146121421 +0000 UTC m=+1256.566188226"
Dec 05 12:46:17 crc kubenswrapper[4784]: I1205 12:46:17.323284 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 05 12:46:17 crc kubenswrapper[4784]: I1205 12:46:17.522673 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/528c8fca-e0a5-4a77-bde7-fc2d149416d0-combined-ca-bundle\") pod \"528c8fca-e0a5-4a77-bde7-fc2d149416d0\" (UID: \"528c8fca-e0a5-4a77-bde7-fc2d149416d0\") "
Dec 05 12:46:17 crc kubenswrapper[4784]: I1205 12:46:17.522747 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/528c8fca-e0a5-4a77-bde7-fc2d149416d0-config-data\") pod \"528c8fca-e0a5-4a77-bde7-fc2d149416d0\" (UID: \"528c8fca-e0a5-4a77-bde7-fc2d149416d0\") "
Dec 05 12:46:17 crc kubenswrapper[4784]: I1205 12:46:17.522834 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/528c8fca-e0a5-4a77-bde7-fc2d149416d0-httpd-run\") pod \"528c8fca-e0a5-4a77-bde7-fc2d149416d0\" (UID: \"528c8fca-e0a5-4a77-bde7-fc2d149416d0\") "
Dec 05 12:46:17 crc kubenswrapper[4784]: I1205 12:46:17.522871 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/528c8fca-e0a5-4a77-bde7-fc2d149416d0-scripts\") pod \"528c8fca-e0a5-4a77-bde7-fc2d149416d0\" (UID: \"528c8fca-e0a5-4a77-bde7-fc2d149416d0\") "
Dec 05 12:46:17 crc kubenswrapper[4784]: I1205 12:46:17.522940 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/528c8fca-e0a5-4a77-bde7-fc2d149416d0-logs\") pod \"528c8fca-e0a5-4a77-bde7-fc2d149416d0\" (UID: \"528c8fca-e0a5-4a77-bde7-fc2d149416d0\") "
Dec 05 12:46:17 crc kubenswrapper[4784]: I1205 12:46:17.522964 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s9m8p\" (UniqueName: \"kubernetes.io/projected/528c8fca-e0a5-4a77-bde7-fc2d149416d0-kube-api-access-s9m8p\") pod \"528c8fca-e0a5-4a77-bde7-fc2d149416d0\" (UID: \"528c8fca-e0a5-4a77-bde7-fc2d149416d0\") "
Dec 05 12:46:17 crc kubenswrapper[4784]: I1205 12:46:17.523024 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"528c8fca-e0a5-4a77-bde7-fc2d149416d0\" (UID: \"528c8fca-e0a5-4a77-bde7-fc2d149416d0\") "
Dec 05 12:46:17 crc kubenswrapper[4784]: I1205 12:46:17.524219 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/528c8fca-e0a5-4a77-bde7-fc2d149416d0-logs" (OuterVolumeSpecName: "logs") pod "528c8fca-e0a5-4a77-bde7-fc2d149416d0" (UID: "528c8fca-e0a5-4a77-bde7-fc2d149416d0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 12:46:17 crc kubenswrapper[4784]: I1205 12:46:17.526531 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/528c8fca-e0a5-4a77-bde7-fc2d149416d0-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "528c8fca-e0a5-4a77-bde7-fc2d149416d0" (UID: "528c8fca-e0a5-4a77-bde7-fc2d149416d0"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 12:46:17 crc kubenswrapper[4784]: I1205 12:46:17.531850 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/528c8fca-e0a5-4a77-bde7-fc2d149416d0-scripts" (OuterVolumeSpecName: "scripts") pod "528c8fca-e0a5-4a77-bde7-fc2d149416d0" (UID: "528c8fca-e0a5-4a77-bde7-fc2d149416d0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:46:17 crc kubenswrapper[4784]: I1205 12:46:17.533324 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "528c8fca-e0a5-4a77-bde7-fc2d149416d0" (UID: "528c8fca-e0a5-4a77-bde7-fc2d149416d0"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 05 12:46:17 crc kubenswrapper[4784]: I1205 12:46:17.533421 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/528c8fca-e0a5-4a77-bde7-fc2d149416d0-kube-api-access-s9m8p" (OuterVolumeSpecName: "kube-api-access-s9m8p") pod "528c8fca-e0a5-4a77-bde7-fc2d149416d0" (UID: "528c8fca-e0a5-4a77-bde7-fc2d149416d0"). InnerVolumeSpecName "kube-api-access-s9m8p". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:46:17 crc kubenswrapper[4784]: E1205 12:46:17.576225 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/528c8fca-e0a5-4a77-bde7-fc2d149416d0-combined-ca-bundle podName:528c8fca-e0a5-4a77-bde7-fc2d149416d0 nodeName:}" failed. No retries permitted until 2025-12-05 12:46:18.076174394 +0000 UTC m=+1257.496241209 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/528c8fca-e0a5-4a77-bde7-fc2d149416d0-combined-ca-bundle") pod "528c8fca-e0a5-4a77-bde7-fc2d149416d0" (UID: "528c8fca-e0a5-4a77-bde7-fc2d149416d0") : error deleting /var/lib/kubelet/pods/528c8fca-e0a5-4a77-bde7-fc2d149416d0/volume-subpaths: remove /var/lib/kubelet/pods/528c8fca-e0a5-4a77-bde7-fc2d149416d0/volume-subpaths: no such file or directory
Dec 05 12:46:17 crc kubenswrapper[4784]: I1205 12:46:17.581324 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/528c8fca-e0a5-4a77-bde7-fc2d149416d0-config-data" (OuterVolumeSpecName: "config-data") pod "528c8fca-e0a5-4a77-bde7-fc2d149416d0" (UID: "528c8fca-e0a5-4a77-bde7-fc2d149416d0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:46:17 crc kubenswrapper[4784]: I1205 12:46:17.626003 4784 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/528c8fca-e0a5-4a77-bde7-fc2d149416d0-httpd-run\") on node \"crc\" DevicePath \"\""
Dec 05 12:46:17 crc kubenswrapper[4784]: I1205 12:46:17.626040 4784 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/528c8fca-e0a5-4a77-bde7-fc2d149416d0-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 12:46:17 crc kubenswrapper[4784]: I1205 12:46:17.626051 4784 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/528c8fca-e0a5-4a77-bde7-fc2d149416d0-logs\") on node \"crc\" DevicePath \"\""
Dec 05 12:46:17 crc kubenswrapper[4784]: I1205 12:46:17.626062 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s9m8p\" (UniqueName: \"kubernetes.io/projected/528c8fca-e0a5-4a77-bde7-fc2d149416d0-kube-api-access-s9m8p\") on node \"crc\" DevicePath \"\""
Dec 05 12:46:17 crc kubenswrapper[4784]: I1205 12:46:17.626087 4784 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" "
Dec 05 12:46:17 crc kubenswrapper[4784]: I1205 12:46:17.626097 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/528c8fca-e0a5-4a77-bde7-fc2d149416d0-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 12:46:17 crc kubenswrapper[4784]: I1205 12:46:17.650370 4784 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc"
Dec 05 12:46:17 crc kubenswrapper[4784]: I1205 12:46:17.729062 4784 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\""
Dec 05 12:46:17 crc kubenswrapper[4784]: I1205 12:46:17.933916 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-api-0"
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.136206 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/528c8fca-e0a5-4a77-bde7-fc2d149416d0-combined-ca-bundle\") pod \"528c8fca-e0a5-4a77-bde7-fc2d149416d0\" (UID: \"528c8fca-e0a5-4a77-bde7-fc2d149416d0\") "
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.140169 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/528c8fca-e0a5-4a77-bde7-fc2d149416d0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "528c8fca-e0a5-4a77-bde7-fc2d149416d0" (UID: "528c8fca-e0a5-4a77-bde7-fc2d149416d0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.160573 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"528c8fca-e0a5-4a77-bde7-fc2d149416d0","Type":"ContainerDied","Data":"8063c9468605028eab51a8a58c527881f9a9daa03504f5da0aa2c993235b88cb"}
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.160651 4784 scope.go:117] "RemoveContainer" containerID="2964ad9cf932774e3e8627abaa8728709c35d0281c58aa26cc85c3e7c804e4e8"
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.160651 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.166570 4784 generic.go:334] "Generic (PLEG): container finished" podID="ac26b89e-ca78-49b7-9332-ef83dfaf4a87" containerID="77e2a7c42808e9e0e4c00d4af24858424deab4379fd3e42a566f5ef44e9abf18" exitCode=0
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.166605 4784 generic.go:334] "Generic (PLEG): container finished" podID="ac26b89e-ca78-49b7-9332-ef83dfaf4a87" containerID="939b90befea8539870cbe36b6f5192fd9a0cac640ba20efb942874be42be9891" exitCode=143
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.166673 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ac26b89e-ca78-49b7-9332-ef83dfaf4a87","Type":"ContainerDied","Data":"77e2a7c42808e9e0e4c00d4af24858424deab4379fd3e42a566f5ef44e9abf18"}
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.166696 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ac26b89e-ca78-49b7-9332-ef83dfaf4a87","Type":"ContainerDied","Data":"939b90befea8539870cbe36b6f5192fd9a0cac640ba20efb942874be42be9891"}
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.169088 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6bc87b8895-m5b7r" event={"ID":"c4c57012-5781-4940-9551-6a53e2f9fad3","Type":"ContainerStarted","Data":"e1b68d47fe4d79a8d635efe919fd56090633d7ad231a18e0e91d08d73c9fc8ce"}
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.232441 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.239846 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/528c8fca-e0a5-4a77-bde7-fc2d149416d0-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.255873 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.288104 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 05 12:46:18 crc kubenswrapper[4784]: E1205 12:46:18.289064 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="528c8fca-e0a5-4a77-bde7-fc2d149416d0" containerName="glance-log"
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.289130 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="528c8fca-e0a5-4a77-bde7-fc2d149416d0" containerName="glance-log"
Dec 05 12:46:18 crc kubenswrapper[4784]: E1205 12:46:18.289163 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="528c8fca-e0a5-4a77-bde7-fc2d149416d0" containerName="glance-httpd"
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.289170 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="528c8fca-e0a5-4a77-bde7-fc2d149416d0" containerName="glance-httpd"
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.289949 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="528c8fca-e0a5-4a77-bde7-fc2d149416d0" containerName="glance-log"
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.289995 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="528c8fca-e0a5-4a77-bde7-fc2d149416d0" containerName="glance-httpd"
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.291377 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.300178 4784 scope.go:117] "RemoveContainer" containerID="5b8692acbe09d6fb2de78c1e6356949da4627e9def7da5ae22271c717d381793"
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.307008 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.317374 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc"
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.324614 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.444871 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"c4ff7099-6040-47b5-b8bd-3951cbd08109\") " pod="openstack/glance-default-external-api-0"
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.444962 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c4ff7099-6040-47b5-b8bd-3951cbd08109-scripts\") pod \"glance-default-external-api-0\" (UID: \"c4ff7099-6040-47b5-b8bd-3951cbd08109\") " pod="openstack/glance-default-external-api-0"
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.445029 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4ff7099-6040-47b5-b8bd-3951cbd08109-config-data\") pod \"glance-default-external-api-0\" (UID: \"c4ff7099-6040-47b5-b8bd-3951cbd08109\") " pod="openstack/glance-default-external-api-0"
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.445053 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c4ff7099-6040-47b5-b8bd-3951cbd08109-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c4ff7099-6040-47b5-b8bd-3951cbd08109\") " pod="openstack/glance-default-external-api-0"
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.445094 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-92k5g\" (UniqueName: \"kubernetes.io/projected/c4ff7099-6040-47b5-b8bd-3951cbd08109-kube-api-access-92k5g\") pod \"glance-default-external-api-0\" (UID: \"c4ff7099-6040-47b5-b8bd-3951cbd08109\") " pod="openstack/glance-default-external-api-0"
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.445116 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4ff7099-6040-47b5-b8bd-3951cbd08109-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"c4ff7099-6040-47b5-b8bd-3951cbd08109\") " pod="openstack/glance-default-external-api-0"
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.445131 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c4ff7099-6040-47b5-b8bd-3951cbd08109-logs\") pod \"glance-default-external-api-0\" (UID: \"c4ff7099-6040-47b5-b8bd-3951cbd08109\") " pod="openstack/glance-default-external-api-0"
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.445148 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4ff7099-6040-47b5-b8bd-3951cbd08109-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c4ff7099-6040-47b5-b8bd-3951cbd08109\") " pod="openstack/glance-default-external-api-0"
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.547310 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4ff7099-6040-47b5-b8bd-3951cbd08109-config-data\") pod \"glance-default-external-api-0\" (UID: \"c4ff7099-6040-47b5-b8bd-3951cbd08109\") " pod="openstack/glance-default-external-api-0"
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.547371 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c4ff7099-6040-47b5-b8bd-3951cbd08109-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c4ff7099-6040-47b5-b8bd-3951cbd08109\") " pod="openstack/glance-default-external-api-0"
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.547419 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-92k5g\" (UniqueName: \"kubernetes.io/projected/c4ff7099-6040-47b5-b8bd-3951cbd08109-kube-api-access-92k5g\") pod \"glance-default-external-api-0\" (UID: \"c4ff7099-6040-47b5-b8bd-3951cbd08109\") " pod="openstack/glance-default-external-api-0"
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.547458 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4ff7099-6040-47b5-b8bd-3951cbd08109-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"c4ff7099-6040-47b5-b8bd-3951cbd08109\") " pod="openstack/glance-default-external-api-0"
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.547474 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c4ff7099-6040-47b5-b8bd-3951cbd08109-logs\") pod \"glance-default-external-api-0\" (UID: \"c4ff7099-6040-47b5-b8bd-3951cbd08109\") " pod="openstack/glance-default-external-api-0"
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.547494 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4ff7099-6040-47b5-b8bd-3951cbd08109-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c4ff7099-6040-47b5-b8bd-3951cbd08109\") " pod="openstack/glance-default-external-api-0"
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.547535 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"c4ff7099-6040-47b5-b8bd-3951cbd08109\") " pod="openstack/glance-default-external-api-0"
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.547613 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c4ff7099-6040-47b5-b8bd-3951cbd08109-scripts\") pod \"glance-default-external-api-0\" (UID: \"c4ff7099-6040-47b5-b8bd-3951cbd08109\") " pod="openstack/glance-default-external-api-0"
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.547980 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c4ff7099-6040-47b5-b8bd-3951cbd08109-logs\") pod \"glance-default-external-api-0\" (UID: \"c4ff7099-6040-47b5-b8bd-3951cbd08109\") " pod="openstack/glance-default-external-api-0"
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.548060 4784 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"c4ff7099-6040-47b5-b8bd-3951cbd08109\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-external-api-0"
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.548123 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c4ff7099-6040-47b5-b8bd-3951cbd08109-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c4ff7099-6040-47b5-b8bd-3951cbd08109\") " pod="openstack/glance-default-external-api-0"
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.561776 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c4ff7099-6040-47b5-b8bd-3951cbd08109-scripts\") pod \"glance-default-external-api-0\" (UID: \"c4ff7099-6040-47b5-b8bd-3951cbd08109\") " pod="openstack/glance-default-external-api-0"
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.562300 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4ff7099-6040-47b5-b8bd-3951cbd08109-config-data\") pod \"glance-default-external-api-0\" (UID: \"c4ff7099-6040-47b5-b8bd-3951cbd08109\") " pod="openstack/glance-default-external-api-0"
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.570032 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4ff7099-6040-47b5-b8bd-3951cbd08109-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c4ff7099-6040-47b5-b8bd-3951cbd08109\") " pod="openstack/glance-default-external-api-0"
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.572036 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-92k5g\" (UniqueName: \"kubernetes.io/projected/c4ff7099-6040-47b5-b8bd-3951cbd08109-kube-api-access-92k5g\") pod \"glance-default-external-api-0\" (UID: \"c4ff7099-6040-47b5-b8bd-3951cbd08109\") " pod="openstack/glance-default-external-api-0"
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.582282 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4ff7099-6040-47b5-b8bd-3951cbd08109-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"c4ff7099-6040-47b5-b8bd-3951cbd08109\") " pod="openstack/glance-default-external-api-0"
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.603362 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"c4ff7099-6040-47b5-b8bd-3951cbd08109\") " pod="openstack/glance-default-external-api-0"
Dec 05 12:46:18 crc kubenswrapper[4784]: I1205 12:46:18.666835 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 05 12:46:19 crc kubenswrapper[4784]: I1205 12:46:19.018177 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="528c8fca-e0a5-4a77-bde7-fc2d149416d0" path="/var/lib/kubelet/pods/528c8fca-e0a5-4a77-bde7-fc2d149416d0/volumes"
Dec 05 12:46:19 crc kubenswrapper[4784]: I1205 12:46:19.090836 4784 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 05 12:46:19 crc kubenswrapper[4784]: I1205 12:46:19.160688 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-7cdb6b7d4-mvtql"
Dec 05 12:46:19 crc kubenswrapper[4784]: I1205 12:46:19.160746 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7cdb6b7d4-mvtql"
Dec 05 12:46:19 crc kubenswrapper[4784]: I1205 12:46:19.188261 4784 generic.go:334] "Generic (PLEG): container finished" podID="851aaea1-2d18-4f91-b410-5fdb0a7f42ec" containerID="064e84eff9398f531da02547908007f542373a3dbc6a82449493cad6c1506a07" exitCode=0
Dec 05 12:46:19 crc kubenswrapper[4784]: I1205 12:46:19.188318 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-zx82c" event={"ID":"851aaea1-2d18-4f91-b410-5fdb0a7f42ec","Type":"ContainerDied","Data":"064e84eff9398f531da02547908007f542373a3dbc6a82449493cad6c1506a07"}
Dec 05 12:46:19 crc kubenswrapper[4784]: I1205 12:46:19.196064 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ac26b89e-ca78-49b7-9332-ef83dfaf4a87","Type":"ContainerDied","Data":"f57cad56c62db7e8788d623751fd929c4d872b72d6fdec0e64ed563c93a04494"}
Dec 05 12:46:19 crc kubenswrapper[4784]: I1205 12:46:19.196089 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f57cad56c62db7e8788d623751fd929c4d872b72d6fdec0e64ed563c93a04494"
Dec 05 12:46:19 crc kubenswrapper[4784]: I1205 12:46:19.202299 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 05 12:46:19 crc kubenswrapper[4784]: I1205 12:46:19.339068 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-5884d57c44-l8tbz"
Dec 05 12:46:19 crc kubenswrapper[4784]: I1205 12:46:19.339521 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-5884d57c44-l8tbz"
Dec 05 12:46:19 crc kubenswrapper[4784]: I1205 12:46:19.369233 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ac26b89e-ca78-49b7-9332-ef83dfaf4a87-logs\") pod \"ac26b89e-ca78-49b7-9332-ef83dfaf4a87\" (UID: \"ac26b89e-ca78-49b7-9332-ef83dfaf4a87\") "
Dec 05 12:46:19 crc kubenswrapper[4784]: I1205 12:46:19.369312 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac26b89e-ca78-49b7-9332-ef83dfaf4a87-combined-ca-bundle\") pod \"ac26b89e-ca78-49b7-9332-ef83dfaf4a87\" (UID: \"ac26b89e-ca78-49b7-9332-ef83dfaf4a87\") "
Dec 05 12:46:19 crc kubenswrapper[4784]: I1205 12:46:19.369344 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4wbp9\" (UniqueName: \"kubernetes.io/projected/ac26b89e-ca78-49b7-9332-ef83dfaf4a87-kube-api-access-4wbp9\") pod \"ac26b89e-ca78-49b7-9332-ef83dfaf4a87\" (UID: \"ac26b89e-ca78-49b7-9332-ef83dfaf4a87\") "
Dec 05 12:46:19 crc kubenswrapper[4784]: I1205 12:46:19.369371 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ac26b89e-ca78-49b7-9332-ef83dfaf4a87-scripts\") pod \"ac26b89e-ca78-49b7-9332-ef83dfaf4a87\" (UID: \"ac26b89e-ca78-49b7-9332-ef83dfaf4a87\") "
Dec 05 12:46:19 crc kubenswrapper[4784]: I1205 12:46:19.369431 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ac26b89e-ca78-49b7-9332-ef83dfaf4a87-httpd-run\") pod \"ac26b89e-ca78-49b7-9332-ef83dfaf4a87\" (UID: \"ac26b89e-ca78-49b7-9332-ef83dfaf4a87\") "
Dec 05 12:46:19 crc kubenswrapper[4784]: I1205 12:46:19.369468 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ac26b89e-ca78-49b7-9332-ef83dfaf4a87\" (UID: \"ac26b89e-ca78-49b7-9332-ef83dfaf4a87\") "
Dec 05 12:46:19 crc kubenswrapper[4784]: I1205 12:46:19.369589 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac26b89e-ca78-49b7-9332-ef83dfaf4a87-config-data\") pod \"ac26b89e-ca78-49b7-9332-ef83dfaf4a87\" (UID: \"ac26b89e-ca78-49b7-9332-ef83dfaf4a87\") "
Dec 05 12:46:19 crc kubenswrapper[4784]: I1205 12:46:19.369764 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ac26b89e-ca78-49b7-9332-ef83dfaf4a87-logs" (OuterVolumeSpecName: "logs") pod "ac26b89e-ca78-49b7-9332-ef83dfaf4a87" (UID: "ac26b89e-ca78-49b7-9332-ef83dfaf4a87"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 12:46:19 crc kubenswrapper[4784]: I1205 12:46:19.370074 4784 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ac26b89e-ca78-49b7-9332-ef83dfaf4a87-logs\") on node \"crc\" DevicePath \"\""
Dec 05 12:46:19 crc kubenswrapper[4784]: I1205 12:46:19.370846 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ac26b89e-ca78-49b7-9332-ef83dfaf4a87-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "ac26b89e-ca78-49b7-9332-ef83dfaf4a87" (UID: "ac26b89e-ca78-49b7-9332-ef83dfaf4a87"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 12:46:19 crc kubenswrapper[4784]: I1205 12:46:19.379375 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "glance") pod "ac26b89e-ca78-49b7-9332-ef83dfaf4a87" (UID: "ac26b89e-ca78-49b7-9332-ef83dfaf4a87"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 05 12:46:19 crc kubenswrapper[4784]: I1205 12:46:19.388356 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac26b89e-ca78-49b7-9332-ef83dfaf4a87-scripts" (OuterVolumeSpecName: "scripts") pod "ac26b89e-ca78-49b7-9332-ef83dfaf4a87" (UID: "ac26b89e-ca78-49b7-9332-ef83dfaf4a87"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:46:19 crc kubenswrapper[4784]: I1205 12:46:19.388426 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac26b89e-ca78-49b7-9332-ef83dfaf4a87-kube-api-access-4wbp9" (OuterVolumeSpecName: "kube-api-access-4wbp9") pod "ac26b89e-ca78-49b7-9332-ef83dfaf4a87" (UID: "ac26b89e-ca78-49b7-9332-ef83dfaf4a87"). InnerVolumeSpecName "kube-api-access-4wbp9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:46:19 crc kubenswrapper[4784]: I1205 12:46:19.423007 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac26b89e-ca78-49b7-9332-ef83dfaf4a87-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ac26b89e-ca78-49b7-9332-ef83dfaf4a87" (UID: "ac26b89e-ca78-49b7-9332-ef83dfaf4a87"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:46:19 crc kubenswrapper[4784]: I1205 12:46:19.435469 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac26b89e-ca78-49b7-9332-ef83dfaf4a87-config-data" (OuterVolumeSpecName: "config-data") pod "ac26b89e-ca78-49b7-9332-ef83dfaf4a87" (UID: "ac26b89e-ca78-49b7-9332-ef83dfaf4a87"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:46:19 crc kubenswrapper[4784]: I1205 12:46:19.471364 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac26b89e-ca78-49b7-9332-ef83dfaf4a87-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 12:46:19 crc kubenswrapper[4784]: I1205 12:46:19.471398 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4wbp9\" (UniqueName: \"kubernetes.io/projected/ac26b89e-ca78-49b7-9332-ef83dfaf4a87-kube-api-access-4wbp9\") on node \"crc\" DevicePath \"\""
Dec 05 12:46:19 crc kubenswrapper[4784]: I1205 12:46:19.471409 4784 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ac26b89e-ca78-49b7-9332-ef83dfaf4a87-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 12:46:19 crc kubenswrapper[4784]: I1205 12:46:19.471418 4784 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ac26b89e-ca78-49b7-9332-ef83dfaf4a87-httpd-run\") on node \"crc\" DevicePath \"\""
Dec 05 12:46:19 crc kubenswrapper[4784]: I1205 12:46:19.471436 4784 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" "
Dec 05 12:46:19 crc kubenswrapper[4784]: I1205 12:46:19.471446 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac26b89e-ca78-49b7-9332-ef83dfaf4a87-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 12:46:19 crc kubenswrapper[4784]: I1205 12:46:19.490778 4784 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc"
Dec 05 12:46:19 crc kubenswrapper[4784]: I1205 12:46:19.572709 4784 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\""
Dec 05 12:46:19 crc kubenswrapper[4784]: I1205 12:46:19.676078 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.058958 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0"
Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.094143 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-decision-engine-0"
Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.208159 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f34e93a8-02d9-44ef-a18e-13ce24c3f9a6","Type":"ContainerStarted","Data":"bce4865197866fe9c9832cea29515d91f28d0c5b2110f830504b13d0a2b0c362"}
Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.211413 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6bc87b8895-m5b7r" event={"ID":"c4c57012-5781-4940-9551-6a53e2f9fad3","Type":"ContainerStarted","Data":"bad7bd0e73099f3ba49596a220a2ad7afd2653a6f35f5b541e7942aa73f9072d"}
Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.211505 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-6bc87b8895-m5b7r"
Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.224844 4784 generic.go:334] "Generic (PLEG): container finished" podID="4b05cb70-952a-4d24-a3e5-cbbff5d53021" containerID="ed101b41af71c06e8fc74105f382f320686edc591a1254573e361ce8b50553b4" exitCode=0
Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.225674 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-jptmz" event={"ID":"4b05cb70-952a-4d24-a3e5-cbbff5d53021","Type":"ContainerDied","Data":"ed101b41af71c06e8fc74105f382f320686edc591a1254573e361ce8b50553b4"}
Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.225706 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-decision-engine-0"
Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.225749 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.237770 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-6bc87b8895-m5b7r" podStartSLOduration=5.237745824 podStartE2EDuration="5.237745824s" podCreationTimestamp="2025-12-05 12:46:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:46:20.236452983 +0000 UTC m=+1259.656519798" watchObservedRunningTime="2025-12-05 12:46:20.237745824 +0000 UTC m=+1259.657812639"
Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.296785 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-decision-engine-0"
Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.360352 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.376167 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.384288 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 05 12:46:20 crc kubenswrapper[4784]: E1205 12:46:20.384785 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac26b89e-ca78-49b7-9332-ef83dfaf4a87" containerName="glance-log"
Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.384804 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac26b89e-ca78-49b7-9332-ef83dfaf4a87" containerName="glance-log"
Dec 05 12:46:20 crc kubenswrapper[4784]: E1205 12:46:20.384826 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac26b89e-ca78-49b7-9332-ef83dfaf4a87" containerName="glance-httpd"
Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.384834 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac26b89e-ca78-49b7-9332-ef83dfaf4a87" containerName="glance-httpd"
Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.385026 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac26b89e-ca78-49b7-9332-ef83dfaf4a87" containerName="glance-httpd"
Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.385053 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac26b89e-ca78-49b7-9332-ef83dfaf4a87" containerName="glance-log"
Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.386666 4784 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.389881 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.390810 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.416655 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.449324 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-decision-engine-0"] Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.496160 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eeba033c-75f0-4528-b3ee-13bab8f9669c-scripts\") pod \"glance-default-internal-api-0\" (UID: \"eeba033c-75f0-4528-b3ee-13bab8f9669c\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.496218 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9dhkh\" (UniqueName: \"kubernetes.io/projected/eeba033c-75f0-4528-b3ee-13bab8f9669c-kube-api-access-9dhkh\") pod \"glance-default-internal-api-0\" (UID: \"eeba033c-75f0-4528-b3ee-13bab8f9669c\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.496261 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eeba033c-75f0-4528-b3ee-13bab8f9669c-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"eeba033c-75f0-4528-b3ee-13bab8f9669c\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.496305 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/eeba033c-75f0-4528-b3ee-13bab8f9669c-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"eeba033c-75f0-4528-b3ee-13bab8f9669c\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.496366 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"eeba033c-75f0-4528-b3ee-13bab8f9669c\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.496395 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eeba033c-75f0-4528-b3ee-13bab8f9669c-logs\") pod \"glance-default-internal-api-0\" (UID: \"eeba033c-75f0-4528-b3ee-13bab8f9669c\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.496418 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eeba033c-75f0-4528-b3ee-13bab8f9669c-config-data\") pod \"glance-default-internal-api-0\" (UID: \"eeba033c-75f0-4528-b3ee-13bab8f9669c\") " 
pod="openstack/glance-default-internal-api-0" Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.496462 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/eeba033c-75f0-4528-b3ee-13bab8f9669c-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"eeba033c-75f0-4528-b3ee-13bab8f9669c\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.598397 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eeba033c-75f0-4528-b3ee-13bab8f9669c-scripts\") pod \"glance-default-internal-api-0\" (UID: \"eeba033c-75f0-4528-b3ee-13bab8f9669c\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.598738 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9dhkh\" (UniqueName: \"kubernetes.io/projected/eeba033c-75f0-4528-b3ee-13bab8f9669c-kube-api-access-9dhkh\") pod \"glance-default-internal-api-0\" (UID: \"eeba033c-75f0-4528-b3ee-13bab8f9669c\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.598869 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eeba033c-75f0-4528-b3ee-13bab8f9669c-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"eeba033c-75f0-4528-b3ee-13bab8f9669c\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.598986 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/eeba033c-75f0-4528-b3ee-13bab8f9669c-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"eeba033c-75f0-4528-b3ee-13bab8f9669c\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.599125 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"eeba033c-75f0-4528-b3ee-13bab8f9669c\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.599263 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eeba033c-75f0-4528-b3ee-13bab8f9669c-logs\") pod \"glance-default-internal-api-0\" (UID: \"eeba033c-75f0-4528-b3ee-13bab8f9669c\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.599376 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eeba033c-75f0-4528-b3ee-13bab8f9669c-config-data\") pod \"glance-default-internal-api-0\" (UID: \"eeba033c-75f0-4528-b3ee-13bab8f9669c\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.599481 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/eeba033c-75f0-4528-b3ee-13bab8f9669c-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"eeba033c-75f0-4528-b3ee-13bab8f9669c\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:46:20 crc 
kubenswrapper[4784]: I1205 12:46:20.600255 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/eeba033c-75f0-4528-b3ee-13bab8f9669c-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"eeba033c-75f0-4528-b3ee-13bab8f9669c\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.600398 4784 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"eeba033c-75f0-4528-b3ee-13bab8f9669c\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/glance-default-internal-api-0" Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.600723 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eeba033c-75f0-4528-b3ee-13bab8f9669c-logs\") pod \"glance-default-internal-api-0\" (UID: \"eeba033c-75f0-4528-b3ee-13bab8f9669c\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.606099 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eeba033c-75f0-4528-b3ee-13bab8f9669c-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"eeba033c-75f0-4528-b3ee-13bab8f9669c\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.607764 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eeba033c-75f0-4528-b3ee-13bab8f9669c-config-data\") pod \"glance-default-internal-api-0\" (UID: \"eeba033c-75f0-4528-b3ee-13bab8f9669c\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.625826 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eeba033c-75f0-4528-b3ee-13bab8f9669c-scripts\") pod \"glance-default-internal-api-0\" (UID: \"eeba033c-75f0-4528-b3ee-13bab8f9669c\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.626600 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/eeba033c-75f0-4528-b3ee-13bab8f9669c-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"eeba033c-75f0-4528-b3ee-13bab8f9669c\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.634063 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9dhkh\" (UniqueName: \"kubernetes.io/projected/eeba033c-75f0-4528-b3ee-13bab8f9669c-kube-api-access-9dhkh\") pod \"glance-default-internal-api-0\" (UID: \"eeba033c-75f0-4528-b3ee-13bab8f9669c\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.668736 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"eeba033c-75f0-4528-b3ee-13bab8f9669c\") " pod="openstack/glance-default-internal-api-0" Dec 05 12:46:20 crc kubenswrapper[4784]: I1205 12:46:20.732256 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 12:46:21 crc kubenswrapper[4784]: I1205 12:46:21.055608 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac26b89e-ca78-49b7-9332-ef83dfaf4a87" path="/var/lib/kubelet/pods/ac26b89e-ca78-49b7-9332-ef83dfaf4a87/volumes" Dec 05 12:46:21 crc kubenswrapper[4784]: I1205 12:46:21.246036 4784 generic.go:334] "Generic (PLEG): container finished" podID="31a07479-cab0-4561-b49b-73b1c3dad744" containerID="9ae2fd9a85d67e8d89687efbdea1588a1417a78ed52f566976c127a094db0480" exitCode=1 Dec 05 12:46:21 crc kubenswrapper[4784]: I1205 12:46:21.246637 4784 scope.go:117] "RemoveContainer" containerID="9ae2fd9a85d67e8d89687efbdea1588a1417a78ed52f566976c127a094db0480" Dec 05 12:46:21 crc kubenswrapper[4784]: I1205 12:46:21.246711 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"31a07479-cab0-4561-b49b-73b1c3dad744","Type":"ContainerDied","Data":"9ae2fd9a85d67e8d89687efbdea1588a1417a78ed52f566976c127a094db0480"} Dec 05 12:46:21 crc kubenswrapper[4784]: I1205 12:46:21.546324 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-applier-0" Dec 05 12:46:21 crc kubenswrapper[4784]: I1205 12:46:21.575393 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-applier-0" Dec 05 12:46:21 crc kubenswrapper[4784]: I1205 12:46:21.814155 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-api-0" Dec 05 12:46:21 crc kubenswrapper[4784]: I1205 12:46:21.822118 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-api-0" Dec 05 12:46:22 crc kubenswrapper[4784]: I1205 12:46:22.266433 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-api-0" Dec 05 12:46:22 crc kubenswrapper[4784]: I1205 12:46:22.295391 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-applier-0" Dec 05 12:46:24 crc kubenswrapper[4784]: I1205 12:46:24.166399 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7f66b8c67-hfrbq" Dec 05 12:46:24 crc kubenswrapper[4784]: I1205 12:46:24.239126 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6f76dd6ddf-4shbw"] Dec 05 12:46:24 crc kubenswrapper[4784]: I1205 12:46:24.239445 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6f76dd6ddf-4shbw" podUID="8c250fed-82c3-44b8-bf7f-b6ef3a2965a1" containerName="dnsmasq-dns" containerID="cri-o://0e613d9dd113c951afd15c648262de3f6869b0eaeaf60ad1238d68ee7320b8cf" gracePeriod=10 Dec 05 12:46:24 crc kubenswrapper[4784]: I1205 12:46:24.640461 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-jptmz" Dec 05 12:46:24 crc kubenswrapper[4784]: I1205 12:46:24.650610 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-zx82c" Dec 05 12:46:24 crc kubenswrapper[4784]: I1205 12:46:24.804832 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b05cb70-952a-4d24-a3e5-cbbff5d53021-config-data\") pod \"4b05cb70-952a-4d24-a3e5-cbbff5d53021\" (UID: \"4b05cb70-952a-4d24-a3e5-cbbff5d53021\") " Dec 05 12:46:24 crc kubenswrapper[4784]: I1205 12:46:24.804965 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/851aaea1-2d18-4f91-b410-5fdb0a7f42ec-config-data\") pod \"851aaea1-2d18-4f91-b410-5fdb0a7f42ec\" (UID: \"851aaea1-2d18-4f91-b410-5fdb0a7f42ec\") " Dec 05 12:46:24 crc kubenswrapper[4784]: I1205 12:46:24.804988 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4b05cb70-952a-4d24-a3e5-cbbff5d53021-credential-keys\") pod \"4b05cb70-952a-4d24-a3e5-cbbff5d53021\" (UID: \"4b05cb70-952a-4d24-a3e5-cbbff5d53021\") " Dec 05 12:46:24 crc kubenswrapper[4784]: I1205 12:46:24.805034 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/851aaea1-2d18-4f91-b410-5fdb0a7f42ec-scripts\") pod \"851aaea1-2d18-4f91-b410-5fdb0a7f42ec\" (UID: \"851aaea1-2d18-4f91-b410-5fdb0a7f42ec\") " Dec 05 12:46:24 crc kubenswrapper[4784]: I1205 12:46:24.805063 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bqpnp\" (UniqueName: \"kubernetes.io/projected/4b05cb70-952a-4d24-a3e5-cbbff5d53021-kube-api-access-bqpnp\") pod \"4b05cb70-952a-4d24-a3e5-cbbff5d53021\" (UID: \"4b05cb70-952a-4d24-a3e5-cbbff5d53021\") " Dec 05 12:46:24 crc kubenswrapper[4784]: I1205 12:46:24.805157 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b05cb70-952a-4d24-a3e5-cbbff5d53021-scripts\") pod \"4b05cb70-952a-4d24-a3e5-cbbff5d53021\" (UID: \"4b05cb70-952a-4d24-a3e5-cbbff5d53021\") " Dec 05 12:46:24 crc kubenswrapper[4784]: I1205 12:46:24.805239 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/851aaea1-2d18-4f91-b410-5fdb0a7f42ec-combined-ca-bundle\") pod \"851aaea1-2d18-4f91-b410-5fdb0a7f42ec\" (UID: \"851aaea1-2d18-4f91-b410-5fdb0a7f42ec\") " Dec 05 12:46:24 crc kubenswrapper[4784]: I1205 12:46:24.805270 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/851aaea1-2d18-4f91-b410-5fdb0a7f42ec-logs\") pod \"851aaea1-2d18-4f91-b410-5fdb0a7f42ec\" (UID: \"851aaea1-2d18-4f91-b410-5fdb0a7f42ec\") " Dec 05 12:46:24 crc kubenswrapper[4784]: I1205 12:46:24.805317 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b05cb70-952a-4d24-a3e5-cbbff5d53021-combined-ca-bundle\") pod \"4b05cb70-952a-4d24-a3e5-cbbff5d53021\" (UID: \"4b05cb70-952a-4d24-a3e5-cbbff5d53021\") " Dec 05 12:46:24 crc kubenswrapper[4784]: I1205 12:46:24.805356 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gw99n\" (UniqueName: \"kubernetes.io/projected/851aaea1-2d18-4f91-b410-5fdb0a7f42ec-kube-api-access-gw99n\") pod \"851aaea1-2d18-4f91-b410-5fdb0a7f42ec\" (UID: 
\"851aaea1-2d18-4f91-b410-5fdb0a7f42ec\") " Dec 05 12:46:24 crc kubenswrapper[4784]: I1205 12:46:24.805386 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4b05cb70-952a-4d24-a3e5-cbbff5d53021-fernet-keys\") pod \"4b05cb70-952a-4d24-a3e5-cbbff5d53021\" (UID: \"4b05cb70-952a-4d24-a3e5-cbbff5d53021\") " Dec 05 12:46:24 crc kubenswrapper[4784]: I1205 12:46:24.809249 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/851aaea1-2d18-4f91-b410-5fdb0a7f42ec-logs" (OuterVolumeSpecName: "logs") pod "851aaea1-2d18-4f91-b410-5fdb0a7f42ec" (UID: "851aaea1-2d18-4f91-b410-5fdb0a7f42ec"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:46:24 crc kubenswrapper[4784]: I1205 12:46:24.827996 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b05cb70-952a-4d24-a3e5-cbbff5d53021-kube-api-access-bqpnp" (OuterVolumeSpecName: "kube-api-access-bqpnp") pod "4b05cb70-952a-4d24-a3e5-cbbff5d53021" (UID: "4b05cb70-952a-4d24-a3e5-cbbff5d53021"). InnerVolumeSpecName "kube-api-access-bqpnp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:46:24 crc kubenswrapper[4784]: I1205 12:46:24.828947 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/851aaea1-2d18-4f91-b410-5fdb0a7f42ec-kube-api-access-gw99n" (OuterVolumeSpecName: "kube-api-access-gw99n") pod "851aaea1-2d18-4f91-b410-5fdb0a7f42ec" (UID: "851aaea1-2d18-4f91-b410-5fdb0a7f42ec"). InnerVolumeSpecName "kube-api-access-gw99n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:46:24 crc kubenswrapper[4784]: I1205 12:46:24.829009 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b05cb70-952a-4d24-a3e5-cbbff5d53021-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "4b05cb70-952a-4d24-a3e5-cbbff5d53021" (UID: "4b05cb70-952a-4d24-a3e5-cbbff5d53021"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:46:24 crc kubenswrapper[4784]: I1205 12:46:24.829057 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b05cb70-952a-4d24-a3e5-cbbff5d53021-scripts" (OuterVolumeSpecName: "scripts") pod "4b05cb70-952a-4d24-a3e5-cbbff5d53021" (UID: "4b05cb70-952a-4d24-a3e5-cbbff5d53021"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:46:24 crc kubenswrapper[4784]: I1205 12:46:24.829730 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b05cb70-952a-4d24-a3e5-cbbff5d53021-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "4b05cb70-952a-4d24-a3e5-cbbff5d53021" (UID: "4b05cb70-952a-4d24-a3e5-cbbff5d53021"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:46:24 crc kubenswrapper[4784]: I1205 12:46:24.864351 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/851aaea1-2d18-4f91-b410-5fdb0a7f42ec-scripts" (OuterVolumeSpecName: "scripts") pod "851aaea1-2d18-4f91-b410-5fdb0a7f42ec" (UID: "851aaea1-2d18-4f91-b410-5fdb0a7f42ec"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:46:24 crc kubenswrapper[4784]: I1205 12:46:24.907700 4784 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b05cb70-952a-4d24-a3e5-cbbff5d53021-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:24 crc kubenswrapper[4784]: I1205 12:46:24.907734 4784 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/851aaea1-2d18-4f91-b410-5fdb0a7f42ec-logs\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:24 crc kubenswrapper[4784]: I1205 12:46:24.907747 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gw99n\" (UniqueName: \"kubernetes.io/projected/851aaea1-2d18-4f91-b410-5fdb0a7f42ec-kube-api-access-gw99n\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:24 crc kubenswrapper[4784]: I1205 12:46:24.907761 4784 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4b05cb70-952a-4d24-a3e5-cbbff5d53021-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:24 crc kubenswrapper[4784]: I1205 12:46:24.907773 4784 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4b05cb70-952a-4d24-a3e5-cbbff5d53021-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:24 crc kubenswrapper[4784]: I1205 12:46:24.907784 4784 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/851aaea1-2d18-4f91-b410-5fdb0a7f42ec-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:24 crc kubenswrapper[4784]: I1205 12:46:24.907794 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bqpnp\" (UniqueName: \"kubernetes.io/projected/4b05cb70-952a-4d24-a3e5-cbbff5d53021-kube-api-access-bqpnp\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:24 crc kubenswrapper[4784]: I1205 12:46:24.930331 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b05cb70-952a-4d24-a3e5-cbbff5d53021-config-data" (OuterVolumeSpecName: "config-data") pod "4b05cb70-952a-4d24-a3e5-cbbff5d53021" (UID: "4b05cb70-952a-4d24-a3e5-cbbff5d53021"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:46:24 crc kubenswrapper[4784]: I1205 12:46:24.968869 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b05cb70-952a-4d24-a3e5-cbbff5d53021-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4b05cb70-952a-4d24-a3e5-cbbff5d53021" (UID: "4b05cb70-952a-4d24-a3e5-cbbff5d53021"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:46:24 crc kubenswrapper[4784]: I1205 12:46:24.968927 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/851aaea1-2d18-4f91-b410-5fdb0a7f42ec-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "851aaea1-2d18-4f91-b410-5fdb0a7f42ec" (UID: "851aaea1-2d18-4f91-b410-5fdb0a7f42ec"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:46:24 crc kubenswrapper[4784]: I1205 12:46:24.984313 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/851aaea1-2d18-4f91-b410-5fdb0a7f42ec-config-data" (OuterVolumeSpecName: "config-data") pod "851aaea1-2d18-4f91-b410-5fdb0a7f42ec" (UID: "851aaea1-2d18-4f91-b410-5fdb0a7f42ec"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.010269 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b05cb70-952a-4d24-a3e5-cbbff5d53021-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.010303 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b05cb70-952a-4d24-a3e5-cbbff5d53021-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.010315 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/851aaea1-2d18-4f91-b410-5fdb0a7f42ec-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.010326 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/851aaea1-2d18-4f91-b410-5fdb0a7f42ec-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.042973 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6f76dd6ddf-4shbw" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.111957 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c250fed-82c3-44b8-bf7f-b6ef3a2965a1-config\") pod \"8c250fed-82c3-44b8-bf7f-b6ef3a2965a1\" (UID: \"8c250fed-82c3-44b8-bf7f-b6ef3a2965a1\") " Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.112028 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-68df4\" (UniqueName: \"kubernetes.io/projected/8c250fed-82c3-44b8-bf7f-b6ef3a2965a1-kube-api-access-68df4\") pod \"8c250fed-82c3-44b8-bf7f-b6ef3a2965a1\" (UID: \"8c250fed-82c3-44b8-bf7f-b6ef3a2965a1\") " Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.112062 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8c250fed-82c3-44b8-bf7f-b6ef3a2965a1-dns-svc\") pod \"8c250fed-82c3-44b8-bf7f-b6ef3a2965a1\" (UID: \"8c250fed-82c3-44b8-bf7f-b6ef3a2965a1\") " Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.112208 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8c250fed-82c3-44b8-bf7f-b6ef3a2965a1-ovsdbserver-sb\") pod \"8c250fed-82c3-44b8-bf7f-b6ef3a2965a1\" (UID: \"8c250fed-82c3-44b8-bf7f-b6ef3a2965a1\") " Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.112231 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8c250fed-82c3-44b8-bf7f-b6ef3a2965a1-ovsdbserver-nb\") pod \"8c250fed-82c3-44b8-bf7f-b6ef3a2965a1\" (UID: \"8c250fed-82c3-44b8-bf7f-b6ef3a2965a1\") " Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.122076 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c250fed-82c3-44b8-bf7f-b6ef3a2965a1-kube-api-access-68df4" (OuterVolumeSpecName: "kube-api-access-68df4") pod "8c250fed-82c3-44b8-bf7f-b6ef3a2965a1" (UID: "8c250fed-82c3-44b8-bf7f-b6ef3a2965a1"). InnerVolumeSpecName "kube-api-access-68df4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.219339 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-68df4\" (UniqueName: \"kubernetes.io/projected/8c250fed-82c3-44b8-bf7f-b6ef3a2965a1-kube-api-access-68df4\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.316361 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-zx82c" event={"ID":"851aaea1-2d18-4f91-b410-5fdb0a7f42ec","Type":"ContainerDied","Data":"f81046e74ef906ad90d19cdd55431b7a4803c0fe17668bebf94813a8f62e713a"} Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.316402 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f81046e74ef906ad90d19cdd55431b7a4803c0fe17668bebf94813a8f62e713a" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.316481 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-zx82c" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.321938 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c4ff7099-6040-47b5-b8bd-3951cbd08109","Type":"ContainerStarted","Data":"07d80b8e8c5a0e1b3d7a95c3b9c3e904f7b6e6451fbf0a5668a87778e7605013"} Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.329765 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-bwtqt" event={"ID":"c921ceb1-e577-4b4a-be99-3544491930d3","Type":"ContainerStarted","Data":"1c164b5e8d03ab5c187ce9704d36cd74b5e9701d1919c7d4f77c0c11c3625fb3"} Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.337477 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"31a07479-cab0-4561-b49b-73b1c3dad744","Type":"ContainerStarted","Data":"7e8b7236dd5da54a2156d923a2ffbbba283ac5abea62eb59f87953d914bd80c1"} Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.337506 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/watcher-decision-engine-0" podUID="31a07479-cab0-4561-b49b-73b1c3dad744" containerName="watcher-decision-engine" containerID="cri-o://7e8b7236dd5da54a2156d923a2ffbbba283ac5abea62eb59f87953d914bd80c1" gracePeriod=30 Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.363081 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-bwtqt" podStartSLOduration=4.716471451 podStartE2EDuration="56.363062475s" podCreationTimestamp="2025-12-05 12:45:29 +0000 UTC" firstStartedPulling="2025-12-05 12:45:33.249013802 +0000 UTC m=+1212.669080617" lastFinishedPulling="2025-12-05 12:46:24.895604826 +0000 UTC m=+1264.315671641" observedRunningTime="2025-12-05 12:46:25.35873176 +0000 UTC m=+1264.778798575" watchObservedRunningTime="2025-12-05 12:46:25.363062475 +0000 UTC m=+1264.783129290" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.365526 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f34e93a8-02d9-44ef-a18e-13ce24c3f9a6","Type":"ContainerStarted","Data":"cfe476517fd12a40d6fefa5f79275b17c2054438d81fdfe61f05f708037c427b"} Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.389474 4784 generic.go:334] "Generic (PLEG): container finished" podID="8c250fed-82c3-44b8-bf7f-b6ef3a2965a1" containerID="0e613d9dd113c951afd15c648262de3f6869b0eaeaf60ad1238d68ee7320b8cf" exitCode=0 Dec 05 12:46:25 crc 
kubenswrapper[4784]: I1205 12:46:25.389544 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6f76dd6ddf-4shbw" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.389622 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f76dd6ddf-4shbw" event={"ID":"8c250fed-82c3-44b8-bf7f-b6ef3a2965a1","Type":"ContainerDied","Data":"0e613d9dd113c951afd15c648262de3f6869b0eaeaf60ad1238d68ee7320b8cf"} Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.389660 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f76dd6ddf-4shbw" event={"ID":"8c250fed-82c3-44b8-bf7f-b6ef3a2965a1","Type":"ContainerDied","Data":"119cdb7237863791afb33c6a032531c723f5445d19ab6f769e88a59b154b8c81"} Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.389682 4784 scope.go:117] "RemoveContainer" containerID="0e613d9dd113c951afd15c648262de3f6869b0eaeaf60ad1238d68ee7320b8cf" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.407246 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.412094 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-jptmz" event={"ID":"4b05cb70-952a-4d24-a3e5-cbbff5d53021","Type":"ContainerDied","Data":"ed57b2197543792be23f3be3323dad10640cc3786ca25add77d0025fdd45c895"} Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.412142 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ed57b2197543792be23f3be3323dad10640cc3786ca25add77d0025fdd45c895" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.412256 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-jptmz" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.475736 4784 scope.go:117] "RemoveContainer" containerID="16bee9ba48c0b0ab51080d6dd7a25693fb10f218cb5d7dda666030cf67fd30d2" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.536578 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c250fed-82c3-44b8-bf7f-b6ef3a2965a1-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8c250fed-82c3-44b8-bf7f-b6ef3a2965a1" (UID: "8c250fed-82c3-44b8-bf7f-b6ef3a2965a1"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.541518 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c250fed-82c3-44b8-bf7f-b6ef3a2965a1-config" (OuterVolumeSpecName: "config") pod "8c250fed-82c3-44b8-bf7f-b6ef3a2965a1" (UID: "8c250fed-82c3-44b8-bf7f-b6ef3a2965a1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.560897 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c250fed-82c3-44b8-bf7f-b6ef3a2965a1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8c250fed-82c3-44b8-bf7f-b6ef3a2965a1" (UID: "8c250fed-82c3-44b8-bf7f-b6ef3a2965a1"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.566532 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c250fed-82c3-44b8-bf7f-b6ef3a2965a1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8c250fed-82c3-44b8-bf7f-b6ef3a2965a1" (UID: "8c250fed-82c3-44b8-bf7f-b6ef3a2965a1"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.572907 4784 scope.go:117] "RemoveContainer" containerID="0e613d9dd113c951afd15c648262de3f6869b0eaeaf60ad1238d68ee7320b8cf" Dec 05 12:46:25 crc kubenswrapper[4784]: E1205 12:46:25.574609 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e613d9dd113c951afd15c648262de3f6869b0eaeaf60ad1238d68ee7320b8cf\": container with ID starting with 0e613d9dd113c951afd15c648262de3f6869b0eaeaf60ad1238d68ee7320b8cf not found: ID does not exist" containerID="0e613d9dd113c951afd15c648262de3f6869b0eaeaf60ad1238d68ee7320b8cf" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.574643 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e613d9dd113c951afd15c648262de3f6869b0eaeaf60ad1238d68ee7320b8cf"} err="failed to get container status \"0e613d9dd113c951afd15c648262de3f6869b0eaeaf60ad1238d68ee7320b8cf\": rpc error: code = NotFound desc = could not find container \"0e613d9dd113c951afd15c648262de3f6869b0eaeaf60ad1238d68ee7320b8cf\": container with ID starting with 0e613d9dd113c951afd15c648262de3f6869b0eaeaf60ad1238d68ee7320b8cf not found: ID does not exist" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.574675 4784 scope.go:117] "RemoveContainer" containerID="16bee9ba48c0b0ab51080d6dd7a25693fb10f218cb5d7dda666030cf67fd30d2" Dec 05 12:46:25 crc kubenswrapper[4784]: E1205 12:46:25.574959 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"16bee9ba48c0b0ab51080d6dd7a25693fb10f218cb5d7dda666030cf67fd30d2\": container with ID starting with 16bee9ba48c0b0ab51080d6dd7a25693fb10f218cb5d7dda666030cf67fd30d2 not found: ID does not exist" containerID="16bee9ba48c0b0ab51080d6dd7a25693fb10f218cb5d7dda666030cf67fd30d2" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.574976 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"16bee9ba48c0b0ab51080d6dd7a25693fb10f218cb5d7dda666030cf67fd30d2"} err="failed to get container status \"16bee9ba48c0b0ab51080d6dd7a25693fb10f218cb5d7dda666030cf67fd30d2\": rpc error: code = NotFound desc = could not find container \"16bee9ba48c0b0ab51080d6dd7a25693fb10f218cb5d7dda666030cf67fd30d2\": container with ID starting with 16bee9ba48c0b0ab51080d6dd7a25693fb10f218cb5d7dda666030cf67fd30d2 not found: ID does not exist" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.627669 4784 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8c250fed-82c3-44b8-bf7f-b6ef3a2965a1-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.627715 4784 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8c250fed-82c3-44b8-bf7f-b6ef3a2965a1-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 
12:46:25.627728 4784 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8c250fed-82c3-44b8-bf7f-b6ef3a2965a1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.627737 4784 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c250fed-82c3-44b8-bf7f-b6ef3a2965a1-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.765007 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6f76dd6ddf-4shbw"] Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.792273 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6f76dd6ddf-4shbw"] Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.805238 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-6df6c9b849-hzswf"] Dec 05 12:46:25 crc kubenswrapper[4784]: E1205 12:46:25.805641 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="851aaea1-2d18-4f91-b410-5fdb0a7f42ec" containerName="placement-db-sync" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.805658 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="851aaea1-2d18-4f91-b410-5fdb0a7f42ec" containerName="placement-db-sync" Dec 05 12:46:25 crc kubenswrapper[4784]: E1205 12:46:25.805673 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b05cb70-952a-4d24-a3e5-cbbff5d53021" containerName="keystone-bootstrap" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.805679 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b05cb70-952a-4d24-a3e5-cbbff5d53021" containerName="keystone-bootstrap" Dec 05 12:46:25 crc kubenswrapper[4784]: E1205 12:46:25.805693 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c250fed-82c3-44b8-bf7f-b6ef3a2965a1" containerName="dnsmasq-dns" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.805700 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c250fed-82c3-44b8-bf7f-b6ef3a2965a1" containerName="dnsmasq-dns" Dec 05 12:46:25 crc kubenswrapper[4784]: E1205 12:46:25.805727 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c250fed-82c3-44b8-bf7f-b6ef3a2965a1" containerName="init" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.805734 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c250fed-82c3-44b8-bf7f-b6ef3a2965a1" containerName="init" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.805892 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="851aaea1-2d18-4f91-b410-5fdb0a7f42ec" containerName="placement-db-sync" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.805903 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b05cb70-952a-4d24-a3e5-cbbff5d53021" containerName="keystone-bootstrap" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.805915 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c250fed-82c3-44b8-bf7f-b6ef3a2965a1" containerName="dnsmasq-dns" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.806551 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-6df6c9b849-hzswf" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.812660 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.812885 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.812987 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.813077 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.813493 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-lkxwc" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.813732 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.817668 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-6df6c9b849-hzswf"] Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.905881 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-59c9dd888d-55zdv"] Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.907449 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-59c9dd888d-55zdv" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.915757 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.915813 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-9l6d5" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.915759 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.916002 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.918772 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.925830 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-59c9dd888d-55zdv"] Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.933707 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74bc0f27-17f2-4980-8c67-3a980c2e267d-config-data\") pod \"keystone-6df6c9b849-hzswf\" (UID: \"74bc0f27-17f2-4980-8c67-3a980c2e267d\") " pod="openstack/keystone-6df6c9b849-hzswf" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.933790 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/74bc0f27-17f2-4980-8c67-3a980c2e267d-fernet-keys\") pod \"keystone-6df6c9b849-hzswf\" (UID: \"74bc0f27-17f2-4980-8c67-3a980c2e267d\") " pod="openstack/keystone-6df6c9b849-hzswf" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.934167 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tnr7t\" 
(UniqueName: \"kubernetes.io/projected/74bc0f27-17f2-4980-8c67-3a980c2e267d-kube-api-access-tnr7t\") pod \"keystone-6df6c9b849-hzswf\" (UID: \"74bc0f27-17f2-4980-8c67-3a980c2e267d\") " pod="openstack/keystone-6df6c9b849-hzswf" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.934265 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74bc0f27-17f2-4980-8c67-3a980c2e267d-combined-ca-bundle\") pod \"keystone-6df6c9b849-hzswf\" (UID: \"74bc0f27-17f2-4980-8c67-3a980c2e267d\") " pod="openstack/keystone-6df6c9b849-hzswf" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.934375 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/74bc0f27-17f2-4980-8c67-3a980c2e267d-scripts\") pod \"keystone-6df6c9b849-hzswf\" (UID: \"74bc0f27-17f2-4980-8c67-3a980c2e267d\") " pod="openstack/keystone-6df6c9b849-hzswf" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.934398 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/74bc0f27-17f2-4980-8c67-3a980c2e267d-credential-keys\") pod \"keystone-6df6c9b849-hzswf\" (UID: \"74bc0f27-17f2-4980-8c67-3a980c2e267d\") " pod="openstack/keystone-6df6c9b849-hzswf" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.934430 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/74bc0f27-17f2-4980-8c67-3a980c2e267d-internal-tls-certs\") pod \"keystone-6df6c9b849-hzswf\" (UID: \"74bc0f27-17f2-4980-8c67-3a980c2e267d\") " pod="openstack/keystone-6df6c9b849-hzswf" Dec 05 12:46:25 crc kubenswrapper[4784]: I1205 12:46:25.934459 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/74bc0f27-17f2-4980-8c67-3a980c2e267d-public-tls-certs\") pod \"keystone-6df6c9b849-hzswf\" (UID: \"74bc0f27-17f2-4980-8c67-3a980c2e267d\") " pod="openstack/keystone-6df6c9b849-hzswf" Dec 05 12:46:26 crc kubenswrapper[4784]: I1205 12:46:26.035906 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74bc0f27-17f2-4980-8c67-3a980c2e267d-config-data\") pod \"keystone-6df6c9b849-hzswf\" (UID: \"74bc0f27-17f2-4980-8c67-3a980c2e267d\") " pod="openstack/keystone-6df6c9b849-hzswf" Dec 05 12:46:26 crc kubenswrapper[4784]: I1205 12:46:26.035967 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/74bc0f27-17f2-4980-8c67-3a980c2e267d-fernet-keys\") pod \"keystone-6df6c9b849-hzswf\" (UID: \"74bc0f27-17f2-4980-8c67-3a980c2e267d\") " pod="openstack/keystone-6df6c9b849-hzswf" Dec 05 12:46:26 crc kubenswrapper[4784]: I1205 12:46:26.035997 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e09b58a6-8baa-4c70-92dc-f54061239d1b-config-data\") pod \"placement-59c9dd888d-55zdv\" (UID: \"e09b58a6-8baa-4c70-92dc-f54061239d1b\") " pod="openstack/placement-59c9dd888d-55zdv" Dec 05 12:46:26 crc kubenswrapper[4784]: I1205 12:46:26.036023 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tnr7t\" 
(UniqueName: \"kubernetes.io/projected/74bc0f27-17f2-4980-8c67-3a980c2e267d-kube-api-access-tnr7t\") pod \"keystone-6df6c9b849-hzswf\" (UID: \"74bc0f27-17f2-4980-8c67-3a980c2e267d\") " pod="openstack/keystone-6df6c9b849-hzswf" Dec 05 12:46:26 crc kubenswrapper[4784]: I1205 12:46:26.036044 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e09b58a6-8baa-4c70-92dc-f54061239d1b-combined-ca-bundle\") pod \"placement-59c9dd888d-55zdv\" (UID: \"e09b58a6-8baa-4c70-92dc-f54061239d1b\") " pod="openstack/placement-59c9dd888d-55zdv" Dec 05 12:46:26 crc kubenswrapper[4784]: I1205 12:46:26.036075 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e09b58a6-8baa-4c70-92dc-f54061239d1b-internal-tls-certs\") pod \"placement-59c9dd888d-55zdv\" (UID: \"e09b58a6-8baa-4c70-92dc-f54061239d1b\") " pod="openstack/placement-59c9dd888d-55zdv" Dec 05 12:46:26 crc kubenswrapper[4784]: I1205 12:46:26.036096 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e09b58a6-8baa-4c70-92dc-f54061239d1b-scripts\") pod \"placement-59c9dd888d-55zdv\" (UID: \"e09b58a6-8baa-4c70-92dc-f54061239d1b\") " pod="openstack/placement-59c9dd888d-55zdv" Dec 05 12:46:26 crc kubenswrapper[4784]: I1205 12:46:26.036119 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74bc0f27-17f2-4980-8c67-3a980c2e267d-combined-ca-bundle\") pod \"keystone-6df6c9b849-hzswf\" (UID: \"74bc0f27-17f2-4980-8c67-3a980c2e267d\") " pod="openstack/keystone-6df6c9b849-hzswf" Dec 05 12:46:26 crc kubenswrapper[4784]: I1205 12:46:26.036136 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bq2gl\" (UniqueName: \"kubernetes.io/projected/e09b58a6-8baa-4c70-92dc-f54061239d1b-kube-api-access-bq2gl\") pod \"placement-59c9dd888d-55zdv\" (UID: \"e09b58a6-8baa-4c70-92dc-f54061239d1b\") " pod="openstack/placement-59c9dd888d-55zdv" Dec 05 12:46:26 crc kubenswrapper[4784]: I1205 12:46:26.036175 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e09b58a6-8baa-4c70-92dc-f54061239d1b-logs\") pod \"placement-59c9dd888d-55zdv\" (UID: \"e09b58a6-8baa-4c70-92dc-f54061239d1b\") " pod="openstack/placement-59c9dd888d-55zdv" Dec 05 12:46:26 crc kubenswrapper[4784]: I1205 12:46:26.036224 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e09b58a6-8baa-4c70-92dc-f54061239d1b-public-tls-certs\") pod \"placement-59c9dd888d-55zdv\" (UID: \"e09b58a6-8baa-4c70-92dc-f54061239d1b\") " pod="openstack/placement-59c9dd888d-55zdv" Dec 05 12:46:26 crc kubenswrapper[4784]: I1205 12:46:26.036265 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/74bc0f27-17f2-4980-8c67-3a980c2e267d-scripts\") pod \"keystone-6df6c9b849-hzswf\" (UID: \"74bc0f27-17f2-4980-8c67-3a980c2e267d\") " pod="openstack/keystone-6df6c9b849-hzswf" Dec 05 12:46:26 crc kubenswrapper[4784]: I1205 12:46:26.036283 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"credential-keys\" (UniqueName: \"kubernetes.io/secret/74bc0f27-17f2-4980-8c67-3a980c2e267d-credential-keys\") pod \"keystone-6df6c9b849-hzswf\" (UID: \"74bc0f27-17f2-4980-8c67-3a980c2e267d\") " pod="openstack/keystone-6df6c9b849-hzswf" Dec 05 12:46:26 crc kubenswrapper[4784]: I1205 12:46:26.036305 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/74bc0f27-17f2-4980-8c67-3a980c2e267d-internal-tls-certs\") pod \"keystone-6df6c9b849-hzswf\" (UID: \"74bc0f27-17f2-4980-8c67-3a980c2e267d\") " pod="openstack/keystone-6df6c9b849-hzswf" Dec 05 12:46:26 crc kubenswrapper[4784]: I1205 12:46:26.036324 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/74bc0f27-17f2-4980-8c67-3a980c2e267d-public-tls-certs\") pod \"keystone-6df6c9b849-hzswf\" (UID: \"74bc0f27-17f2-4980-8c67-3a980c2e267d\") " pod="openstack/keystone-6df6c9b849-hzswf" Dec 05 12:46:26 crc kubenswrapper[4784]: I1205 12:46:26.042096 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/74bc0f27-17f2-4980-8c67-3a980c2e267d-credential-keys\") pod \"keystone-6df6c9b849-hzswf\" (UID: \"74bc0f27-17f2-4980-8c67-3a980c2e267d\") " pod="openstack/keystone-6df6c9b849-hzswf" Dec 05 12:46:26 crc kubenswrapper[4784]: I1205 12:46:26.050686 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/74bc0f27-17f2-4980-8c67-3a980c2e267d-public-tls-certs\") pod \"keystone-6df6c9b849-hzswf\" (UID: \"74bc0f27-17f2-4980-8c67-3a980c2e267d\") " pod="openstack/keystone-6df6c9b849-hzswf" Dec 05 12:46:26 crc kubenswrapper[4784]: I1205 12:46:26.051535 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/74bc0f27-17f2-4980-8c67-3a980c2e267d-fernet-keys\") pod \"keystone-6df6c9b849-hzswf\" (UID: \"74bc0f27-17f2-4980-8c67-3a980c2e267d\") " pod="openstack/keystone-6df6c9b849-hzswf" Dec 05 12:46:26 crc kubenswrapper[4784]: I1205 12:46:26.051675 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74bc0f27-17f2-4980-8c67-3a980c2e267d-config-data\") pod \"keystone-6df6c9b849-hzswf\" (UID: \"74bc0f27-17f2-4980-8c67-3a980c2e267d\") " pod="openstack/keystone-6df6c9b849-hzswf" Dec 05 12:46:26 crc kubenswrapper[4784]: I1205 12:46:26.052446 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74bc0f27-17f2-4980-8c67-3a980c2e267d-combined-ca-bundle\") pod \"keystone-6df6c9b849-hzswf\" (UID: \"74bc0f27-17f2-4980-8c67-3a980c2e267d\") " pod="openstack/keystone-6df6c9b849-hzswf" Dec 05 12:46:26 crc kubenswrapper[4784]: I1205 12:46:26.058882 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/74bc0f27-17f2-4980-8c67-3a980c2e267d-scripts\") pod \"keystone-6df6c9b849-hzswf\" (UID: \"74bc0f27-17f2-4980-8c67-3a980c2e267d\") " pod="openstack/keystone-6df6c9b849-hzswf" Dec 05 12:46:26 crc kubenswrapper[4784]: I1205 12:46:26.059039 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/74bc0f27-17f2-4980-8c67-3a980c2e267d-internal-tls-certs\") pod \"keystone-6df6c9b849-hzswf\" (UID: 
\"74bc0f27-17f2-4980-8c67-3a980c2e267d\") " pod="openstack/keystone-6df6c9b849-hzswf" Dec 05 12:46:26 crc kubenswrapper[4784]: I1205 12:46:26.062433 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tnr7t\" (UniqueName: \"kubernetes.io/projected/74bc0f27-17f2-4980-8c67-3a980c2e267d-kube-api-access-tnr7t\") pod \"keystone-6df6c9b849-hzswf\" (UID: \"74bc0f27-17f2-4980-8c67-3a980c2e267d\") " pod="openstack/keystone-6df6c9b849-hzswf" Dec 05 12:46:26 crc kubenswrapper[4784]: I1205 12:46:26.135822 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-6df6c9b849-hzswf" Dec 05 12:46:26 crc kubenswrapper[4784]: I1205 12:46:26.138954 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e09b58a6-8baa-4c70-92dc-f54061239d1b-config-data\") pod \"placement-59c9dd888d-55zdv\" (UID: \"e09b58a6-8baa-4c70-92dc-f54061239d1b\") " pod="openstack/placement-59c9dd888d-55zdv" Dec 05 12:46:26 crc kubenswrapper[4784]: I1205 12:46:26.139014 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e09b58a6-8baa-4c70-92dc-f54061239d1b-combined-ca-bundle\") pod \"placement-59c9dd888d-55zdv\" (UID: \"e09b58a6-8baa-4c70-92dc-f54061239d1b\") " pod="openstack/placement-59c9dd888d-55zdv" Dec 05 12:46:26 crc kubenswrapper[4784]: I1205 12:46:26.139048 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e09b58a6-8baa-4c70-92dc-f54061239d1b-internal-tls-certs\") pod \"placement-59c9dd888d-55zdv\" (UID: \"e09b58a6-8baa-4c70-92dc-f54061239d1b\") " pod="openstack/placement-59c9dd888d-55zdv" Dec 05 12:46:26 crc kubenswrapper[4784]: I1205 12:46:26.139073 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e09b58a6-8baa-4c70-92dc-f54061239d1b-scripts\") pod \"placement-59c9dd888d-55zdv\" (UID: \"e09b58a6-8baa-4c70-92dc-f54061239d1b\") " pod="openstack/placement-59c9dd888d-55zdv" Dec 05 12:46:26 crc kubenswrapper[4784]: I1205 12:46:26.139095 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bq2gl\" (UniqueName: \"kubernetes.io/projected/e09b58a6-8baa-4c70-92dc-f54061239d1b-kube-api-access-bq2gl\") pod \"placement-59c9dd888d-55zdv\" (UID: \"e09b58a6-8baa-4c70-92dc-f54061239d1b\") " pod="openstack/placement-59c9dd888d-55zdv" Dec 05 12:46:26 crc kubenswrapper[4784]: I1205 12:46:26.139124 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e09b58a6-8baa-4c70-92dc-f54061239d1b-logs\") pod \"placement-59c9dd888d-55zdv\" (UID: \"e09b58a6-8baa-4c70-92dc-f54061239d1b\") " pod="openstack/placement-59c9dd888d-55zdv" Dec 05 12:46:26 crc kubenswrapper[4784]: I1205 12:46:26.139149 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e09b58a6-8baa-4c70-92dc-f54061239d1b-public-tls-certs\") pod \"placement-59c9dd888d-55zdv\" (UID: \"e09b58a6-8baa-4c70-92dc-f54061239d1b\") " pod="openstack/placement-59c9dd888d-55zdv" Dec 05 12:46:26 crc kubenswrapper[4784]: I1205 12:46:26.141570 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/e09b58a6-8baa-4c70-92dc-f54061239d1b-logs\") pod \"placement-59c9dd888d-55zdv\" (UID: \"e09b58a6-8baa-4c70-92dc-f54061239d1b\") " pod="openstack/placement-59c9dd888d-55zdv" Dec 05 12:46:26 crc kubenswrapper[4784]: I1205 12:46:26.149096 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e09b58a6-8baa-4c70-92dc-f54061239d1b-config-data\") pod \"placement-59c9dd888d-55zdv\" (UID: \"e09b58a6-8baa-4c70-92dc-f54061239d1b\") " pod="openstack/placement-59c9dd888d-55zdv" Dec 05 12:46:26 crc kubenswrapper[4784]: I1205 12:46:26.149304 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e09b58a6-8baa-4c70-92dc-f54061239d1b-combined-ca-bundle\") pod \"placement-59c9dd888d-55zdv\" (UID: \"e09b58a6-8baa-4c70-92dc-f54061239d1b\") " pod="openstack/placement-59c9dd888d-55zdv" Dec 05 12:46:26 crc kubenswrapper[4784]: I1205 12:46:26.149579 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e09b58a6-8baa-4c70-92dc-f54061239d1b-scripts\") pod \"placement-59c9dd888d-55zdv\" (UID: \"e09b58a6-8baa-4c70-92dc-f54061239d1b\") " pod="openstack/placement-59c9dd888d-55zdv" Dec 05 12:46:26 crc kubenswrapper[4784]: I1205 12:46:26.152662 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e09b58a6-8baa-4c70-92dc-f54061239d1b-public-tls-certs\") pod \"placement-59c9dd888d-55zdv\" (UID: \"e09b58a6-8baa-4c70-92dc-f54061239d1b\") " pod="openstack/placement-59c9dd888d-55zdv" Dec 05 12:46:26 crc kubenswrapper[4784]: I1205 12:46:26.156578 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e09b58a6-8baa-4c70-92dc-f54061239d1b-internal-tls-certs\") pod \"placement-59c9dd888d-55zdv\" (UID: \"e09b58a6-8baa-4c70-92dc-f54061239d1b\") " pod="openstack/placement-59c9dd888d-55zdv" Dec 05 12:46:26 crc kubenswrapper[4784]: I1205 12:46:26.158653 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bq2gl\" (UniqueName: \"kubernetes.io/projected/e09b58a6-8baa-4c70-92dc-f54061239d1b-kube-api-access-bq2gl\") pod \"placement-59c9dd888d-55zdv\" (UID: \"e09b58a6-8baa-4c70-92dc-f54061239d1b\") " pod="openstack/placement-59c9dd888d-55zdv" Dec 05 12:46:26 crc kubenswrapper[4784]: I1205 12:46:26.264497 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-59c9dd888d-55zdv" Dec 05 12:46:26 crc kubenswrapper[4784]: I1205 12:46:26.347494 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-api-0"] Dec 05 12:46:26 crc kubenswrapper[4784]: I1205 12:46:26.347890 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/watcher-api-0" podUID="3964e71e-ad0d-4f97-9458-bb1defbd3a47" containerName="watcher-api-log" containerID="cri-o://2de19768eaa35e73bb81e6523b0c64a9bf3209f080fef2729cad20d498ea00d0" gracePeriod=30 Dec 05 12:46:26 crc kubenswrapper[4784]: I1205 12:46:26.348385 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/watcher-api-0" podUID="3964e71e-ad0d-4f97-9458-bb1defbd3a47" containerName="watcher-api" containerID="cri-o://a2d57e911d363f308109c061480931a780e722a62a5199faa807f05c0602cd68" gracePeriod=30 Dec 05 12:46:26 crc kubenswrapper[4784]: I1205 12:46:26.459407 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"eeba033c-75f0-4528-b3ee-13bab8f9669c","Type":"ContainerStarted","Data":"14304b8e9e38f80bdb48ac4ab4c1df56f5463f7aac128e2baa25d06351958a50"} Dec 05 12:46:26 crc kubenswrapper[4784]: I1205 12:46:26.474906 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ee259e74-24f6-4a39-b3d9-3bd926ace782","Type":"ContainerStarted","Data":"472ef199bddfcf498f98d8428f6cdd40f6115c50a8e1b3c362aff43783290366"} Dec 05 12:46:26 crc kubenswrapper[4784]: I1205 12:46:26.483765 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c4ff7099-6040-47b5-b8bd-3951cbd08109","Type":"ContainerStarted","Data":"231f4e7974ee1b7a58afbcd6f8df44026e1162ed5902112b2eb4d27b80811787"} Dec 05 12:46:26 crc kubenswrapper[4784]: I1205 12:46:26.517693 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f34e93a8-02d9-44ef-a18e-13ce24c3f9a6","Type":"ContainerStarted","Data":"012bb17a8dea160d3116f043d5934067dacb1ad0514fbd9e5b119a5ad8596c57"} Dec 05 12:46:27 crc kubenswrapper[4784]: I1205 12:46:27.020841 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c250fed-82c3-44b8-bf7f-b6ef3a2965a1" path="/var/lib/kubelet/pods/8c250fed-82c3-44b8-bf7f-b6ef3a2965a1/volumes" Dec 05 12:46:27 crc kubenswrapper[4784]: I1205 12:46:27.024951 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-6df6c9b849-hzswf"] Dec 05 12:46:27 crc kubenswrapper[4784]: I1205 12:46:27.135927 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-59c9dd888d-55zdv"] Dec 05 12:46:27 crc kubenswrapper[4784]: W1205 12:46:27.149110 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode09b58a6_8baa_4c70_92dc_f54061239d1b.slice/crio-77aa5f5c685a04e6346ead00b9a67aba19b5e2a55932f3d7fc9de255b5503613 WatchSource:0}: Error finding container 77aa5f5c685a04e6346ead00b9a67aba19b5e2a55932f3d7fc9de255b5503613: Status 404 returned error can't find the container with id 77aa5f5c685a04e6346ead00b9a67aba19b5e2a55932f3d7fc9de255b5503613 Dec 05 12:46:27 crc kubenswrapper[4784]: I1205 12:46:27.548688 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6df6c9b849-hzswf" 
event={"ID":"74bc0f27-17f2-4980-8c67-3a980c2e267d","Type":"ContainerStarted","Data":"70dddf8a94587b3cf1635e79eeb7844a7813b153e2991ea0a34ec837edef0e22"} Dec 05 12:46:27 crc kubenswrapper[4784]: I1205 12:46:27.559890 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-24k4j" event={"ID":"e187592b-b331-4144-9a27-ba81e79121b6","Type":"ContainerStarted","Data":"fe7d87073b246c6fa9e935b02e2358264abb29fe814925789307c5ba3835fd3f"} Dec 05 12:46:27 crc kubenswrapper[4784]: I1205 12:46:27.596647 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-24k4j" podStartSLOduration=4.567901689 podStartE2EDuration="58.596629807s" podCreationTimestamp="2025-12-05 12:45:29 +0000 UTC" firstStartedPulling="2025-12-05 12:45:30.821713099 +0000 UTC m=+1210.241779914" lastFinishedPulling="2025-12-05 12:46:24.850441217 +0000 UTC m=+1264.270508032" observedRunningTime="2025-12-05 12:46:27.583418535 +0000 UTC m=+1267.003485350" watchObservedRunningTime="2025-12-05 12:46:27.596629807 +0000 UTC m=+1267.016696612" Dec 05 12:46:27 crc kubenswrapper[4784]: I1205 12:46:27.612451 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f34e93a8-02d9-44ef-a18e-13ce24c3f9a6","Type":"ContainerStarted","Data":"d4344e9c74d238b901026207c5eae31bdb295af254248c2b79dbcef22a2b9693"} Dec 05 12:46:27 crc kubenswrapper[4784]: I1205 12:46:27.612493 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f34e93a8-02d9-44ef-a18e-13ce24c3f9a6","Type":"ContainerStarted","Data":"f72055427ce85899f46e7bc921a07d8d1b0b51b839aeee7276164612a2368e5b"} Dec 05 12:46:27 crc kubenswrapper[4784]: I1205 12:46:27.612502 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f34e93a8-02d9-44ef-a18e-13ce24c3f9a6","Type":"ContainerStarted","Data":"318c387b2f79c0471e1184b70e6f985fe924ba8fcfcdeb1a7860540b67e3b427"} Dec 05 12:46:27 crc kubenswrapper[4784]: I1205 12:46:27.619003 4784 generic.go:334] "Generic (PLEG): container finished" podID="3964e71e-ad0d-4f97-9458-bb1defbd3a47" containerID="2de19768eaa35e73bb81e6523b0c64a9bf3209f080fef2729cad20d498ea00d0" exitCode=143 Dec 05 12:46:27 crc kubenswrapper[4784]: I1205 12:46:27.619109 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"3964e71e-ad0d-4f97-9458-bb1defbd3a47","Type":"ContainerDied","Data":"2de19768eaa35e73bb81e6523b0c64a9bf3209f080fef2729cad20d498ea00d0"} Dec 05 12:46:27 crc kubenswrapper[4784]: I1205 12:46:27.620749 4784 scope.go:117] "RemoveContainer" containerID="01f72d36c4c937c0d367ab3fe8cb9f0e1937d4e62206d56e3a8970cf34710aa6" Dec 05 12:46:27 crc kubenswrapper[4784]: I1205 12:46:27.624402 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-59c9dd888d-55zdv" event={"ID":"e09b58a6-8baa-4c70-92dc-f54061239d1b","Type":"ContainerStarted","Data":"77aa5f5c685a04e6346ead00b9a67aba19b5e2a55932f3d7fc9de255b5503613"} Dec 05 12:46:27 crc kubenswrapper[4784]: I1205 12:46:27.629924 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"eeba033c-75f0-4528-b3ee-13bab8f9669c","Type":"ContainerStarted","Data":"570eb15bffb9b17b6eabb60d2c9dc61d93e9ae20ec1868516f6b6eebb2504f9c"} Dec 05 12:46:27 crc kubenswrapper[4784]: I1205 12:46:27.634450 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" 
event={"ID":"c4ff7099-6040-47b5-b8bd-3951cbd08109","Type":"ContainerStarted","Data":"88d97f20401aabed2d2b719684b04a0fb86fb73778e11c78d156f31d4e295249"} Dec 05 12:46:27 crc kubenswrapper[4784]: I1205 12:46:27.669668 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=9.669650224 podStartE2EDuration="9.669650224s" podCreationTimestamp="2025-12-05 12:46:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:46:27.660734926 +0000 UTC m=+1267.080801741" watchObservedRunningTime="2025-12-05 12:46:27.669650224 +0000 UTC m=+1267.089717039" Dec 05 12:46:27 crc kubenswrapper[4784]: I1205 12:46:27.744766 4784 scope.go:117] "RemoveContainer" containerID="7925d8475d90375d35017a47dfa2b22c6ba668b0447e2b2863362e16e2f0a550" Dec 05 12:46:27 crc kubenswrapper[4784]: I1205 12:46:27.952562 4784 scope.go:117] "RemoveContainer" containerID="adf4b5573e24bc1dc065080e177d137cad1151a2fddf2cdad258bc73d39b0c6e" Dec 05 12:46:28 crc kubenswrapper[4784]: I1205 12:46:28.002753 4784 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/watcher-api-0" podUID="3964e71e-ad0d-4f97-9458-bb1defbd3a47" containerName="watcher-api-log" probeResult="failure" output="Get \"http://10.217.0.163:9322/\": read tcp 10.217.0.2:49668->10.217.0.163:9322: read: connection reset by peer" Dec 05 12:46:28 crc kubenswrapper[4784]: I1205 12:46:28.002811 4784 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/watcher-api-0" podUID="3964e71e-ad0d-4f97-9458-bb1defbd3a47" containerName="watcher-api" probeResult="failure" output="Get \"http://10.217.0.163:9322/\": read tcp 10.217.0.2:49678->10.217.0.163:9322: read: connection reset by peer" Dec 05 12:46:28 crc kubenswrapper[4784]: I1205 12:46:28.653533 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-59c9dd888d-55zdv" event={"ID":"e09b58a6-8baa-4c70-92dc-f54061239d1b","Type":"ContainerStarted","Data":"f6dc1051a0f20296412344b964f481c993f5db76450f2deae3ba59c1bd9ec1f1"} Dec 05 12:46:28 crc kubenswrapper[4784]: I1205 12:46:28.654050 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-59c9dd888d-55zdv" event={"ID":"e09b58a6-8baa-4c70-92dc-f54061239d1b","Type":"ContainerStarted","Data":"2f718421c2d0425928871ffa723d75c081e80276a130b10c6d55890e8096af83"} Dec 05 12:46:28 crc kubenswrapper[4784]: I1205 12:46:28.655144 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-59c9dd888d-55zdv" Dec 05 12:46:28 crc kubenswrapper[4784]: I1205 12:46:28.655170 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-59c9dd888d-55zdv" Dec 05 12:46:28 crc kubenswrapper[4784]: I1205 12:46:28.658742 4784 generic.go:334] "Generic (PLEG): container finished" podID="3964e71e-ad0d-4f97-9458-bb1defbd3a47" containerID="a2d57e911d363f308109c061480931a780e722a62a5199faa807f05c0602cd68" exitCode=0 Dec 05 12:46:28 crc kubenswrapper[4784]: I1205 12:46:28.658819 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"3964e71e-ad0d-4f97-9458-bb1defbd3a47","Type":"ContainerDied","Data":"a2d57e911d363f308109c061480931a780e722a62a5199faa807f05c0602cd68"} Dec 05 12:46:28 crc kubenswrapper[4784]: I1205 12:46:28.660697 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" 
event={"ID":"eeba033c-75f0-4528-b3ee-13bab8f9669c","Type":"ContainerStarted","Data":"47ebf0103ed665e5df4b174bfa2cfac19a6804c39a76fda9244314dbb7d47bff"} Dec 05 12:46:28 crc kubenswrapper[4784]: I1205 12:46:28.662877 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6df6c9b849-hzswf" event={"ID":"74bc0f27-17f2-4980-8c67-3a980c2e267d","Type":"ContainerStarted","Data":"62a16520ab70d26ab63809020ded6661918280414dc9b586353335fae78600ce"} Dec 05 12:46:28 crc kubenswrapper[4784]: I1205 12:46:28.663294 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-6df6c9b849-hzswf" Dec 05 12:46:28 crc kubenswrapper[4784]: I1205 12:46:28.667861 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 12:46:28 crc kubenswrapper[4784]: I1205 12:46:28.667968 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 12:46:28 crc kubenswrapper[4784]: I1205 12:46:28.686180 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-59c9dd888d-55zdv" podStartSLOduration=3.686160688 podStartE2EDuration="3.686160688s" podCreationTimestamp="2025-12-05 12:46:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:46:28.685011022 +0000 UTC m=+1268.105077837" watchObservedRunningTime="2025-12-05 12:46:28.686160688 +0000 UTC m=+1268.106227493" Dec 05 12:46:28 crc kubenswrapper[4784]: I1205 12:46:28.738474 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=8.738458558 podStartE2EDuration="8.738458558s" podCreationTimestamp="2025-12-05 12:46:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:46:28.709752024 +0000 UTC m=+1268.129818839" watchObservedRunningTime="2025-12-05 12:46:28.738458558 +0000 UTC m=+1268.158525373" Dec 05 12:46:28 crc kubenswrapper[4784]: I1205 12:46:28.750748 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f34e93a8-02d9-44ef-a18e-13ce24c3f9a6","Type":"ContainerStarted","Data":"af4c69e148d3a72f0a9d3d58f7ae996f4b6157ab950bdc9fe414f80151e178bd"} Dec 05 12:46:28 crc kubenswrapper[4784]: I1205 12:46:28.751934 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-6df6c9b849-hzswf" podStartSLOduration=3.7519061689999997 podStartE2EDuration="3.751906169s" podCreationTimestamp="2025-12-05 12:46:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:46:28.740445561 +0000 UTC m=+1268.160512376" watchObservedRunningTime="2025-12-05 12:46:28.751906169 +0000 UTC m=+1268.171972984" Dec 05 12:46:28 crc kubenswrapper[4784]: I1205 12:46:28.752164 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 12:46:28 crc kubenswrapper[4784]: I1205 12:46:28.752881 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 12:46:28 crc kubenswrapper[4784]: I1205 12:46:28.754435 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-api-0" Dec 05 12:46:28 crc kubenswrapper[4784]: I1205 12:46:28.795649 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=82.51145562 podStartE2EDuration="2m12.795632512s" podCreationTimestamp="2025-12-05 12:44:16 +0000 UTC" firstStartedPulling="2025-12-05 12:45:28.237448359 +0000 UTC m=+1207.657515214" lastFinishedPulling="2025-12-05 12:46:18.521625291 +0000 UTC m=+1257.941692106" observedRunningTime="2025-12-05 12:46:28.786096244 +0000 UTC m=+1268.206163069" watchObservedRunningTime="2025-12-05 12:46:28.795632512 +0000 UTC m=+1268.215699327" Dec 05 12:46:28 crc kubenswrapper[4784]: I1205 12:46:28.929923 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3964e71e-ad0d-4f97-9458-bb1defbd3a47-logs\") pod \"3964e71e-ad0d-4f97-9458-bb1defbd3a47\" (UID: \"3964e71e-ad0d-4f97-9458-bb1defbd3a47\") " Dec 05 12:46:28 crc kubenswrapper[4784]: I1205 12:46:28.929984 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/3964e71e-ad0d-4f97-9458-bb1defbd3a47-custom-prometheus-ca\") pod \"3964e71e-ad0d-4f97-9458-bb1defbd3a47\" (UID: \"3964e71e-ad0d-4f97-9458-bb1defbd3a47\") " Dec 05 12:46:28 crc kubenswrapper[4784]: I1205 12:46:28.930173 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hxstq\" (UniqueName: \"kubernetes.io/projected/3964e71e-ad0d-4f97-9458-bb1defbd3a47-kube-api-access-hxstq\") pod \"3964e71e-ad0d-4f97-9458-bb1defbd3a47\" (UID: \"3964e71e-ad0d-4f97-9458-bb1defbd3a47\") " Dec 05 12:46:28 crc kubenswrapper[4784]: I1205 12:46:28.930221 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3964e71e-ad0d-4f97-9458-bb1defbd3a47-config-data\") pod \"3964e71e-ad0d-4f97-9458-bb1defbd3a47\" (UID: \"3964e71e-ad0d-4f97-9458-bb1defbd3a47\") " Dec 05 12:46:28 crc kubenswrapper[4784]: I1205 12:46:28.930285 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3964e71e-ad0d-4f97-9458-bb1defbd3a47-combined-ca-bundle\") pod \"3964e71e-ad0d-4f97-9458-bb1defbd3a47\" (UID: \"3964e71e-ad0d-4f97-9458-bb1defbd3a47\") " Dec 05 12:46:28 crc kubenswrapper[4784]: I1205 12:46:28.930754 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3964e71e-ad0d-4f97-9458-bb1defbd3a47-logs" (OuterVolumeSpecName: "logs") pod "3964e71e-ad0d-4f97-9458-bb1defbd3a47" (UID: "3964e71e-ad0d-4f97-9458-bb1defbd3a47"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:46:28 crc kubenswrapper[4784]: I1205 12:46:28.938419 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3964e71e-ad0d-4f97-9458-bb1defbd3a47-kube-api-access-hxstq" (OuterVolumeSpecName: "kube-api-access-hxstq") pod "3964e71e-ad0d-4f97-9458-bb1defbd3a47" (UID: "3964e71e-ad0d-4f97-9458-bb1defbd3a47"). InnerVolumeSpecName "kube-api-access-hxstq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:46:28 crc kubenswrapper[4784]: I1205 12:46:28.972969 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3964e71e-ad0d-4f97-9458-bb1defbd3a47-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3964e71e-ad0d-4f97-9458-bb1defbd3a47" (UID: "3964e71e-ad0d-4f97-9458-bb1defbd3a47"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:46:28 crc kubenswrapper[4784]: I1205 12:46:28.982680 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3964e71e-ad0d-4f97-9458-bb1defbd3a47-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "3964e71e-ad0d-4f97-9458-bb1defbd3a47" (UID: "3964e71e-ad0d-4f97-9458-bb1defbd3a47"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.003393 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3964e71e-ad0d-4f97-9458-bb1defbd3a47-config-data" (OuterVolumeSpecName: "config-data") pod "3964e71e-ad0d-4f97-9458-bb1defbd3a47" (UID: "3964e71e-ad0d-4f97-9458-bb1defbd3a47"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.032366 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3964e71e-ad0d-4f97-9458-bb1defbd3a47-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.032609 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3964e71e-ad0d-4f97-9458-bb1defbd3a47-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.032727 4784 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3964e71e-ad0d-4f97-9458-bb1defbd3a47-logs\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.032802 4784 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/3964e71e-ad0d-4f97-9458-bb1defbd3a47-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.032870 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hxstq\" (UniqueName: \"kubernetes.io/projected/3964e71e-ad0d-4f97-9458-bb1defbd3a47-kube-api-access-hxstq\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.079224 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7b99d567b7-vvbts"] Dec 05 12:46:29 crc kubenswrapper[4784]: E1205 12:46:29.079899 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3964e71e-ad0d-4f97-9458-bb1defbd3a47" containerName="watcher-api-log" Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.080177 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="3964e71e-ad0d-4f97-9458-bb1defbd3a47" containerName="watcher-api-log" Dec 05 12:46:29 crc kubenswrapper[4784]: E1205 12:46:29.080280 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3964e71e-ad0d-4f97-9458-bb1defbd3a47" containerName="watcher-api" Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.080333 4784 
Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.080566 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="3964e71e-ad0d-4f97-9458-bb1defbd3a47" containerName="watcher-api-log"
Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.080628 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="3964e71e-ad0d-4f97-9458-bb1defbd3a47" containerName="watcher-api"
Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.081816 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7b99d567b7-vvbts"
Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.085476 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0"
Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.089346 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7b99d567b7-vvbts"]
Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.236921 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7a27e6c2-282d-4569-bee1-8c27c888a7ad-dns-swift-storage-0\") pod \"dnsmasq-dns-7b99d567b7-vvbts\" (UID: \"7a27e6c2-282d-4569-bee1-8c27c888a7ad\") " pod="openstack/dnsmasq-dns-7b99d567b7-vvbts"
Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.237347 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q47lp\" (UniqueName: \"kubernetes.io/projected/7a27e6c2-282d-4569-bee1-8c27c888a7ad-kube-api-access-q47lp\") pod \"dnsmasq-dns-7b99d567b7-vvbts\" (UID: \"7a27e6c2-282d-4569-bee1-8c27c888a7ad\") " pod="openstack/dnsmasq-dns-7b99d567b7-vvbts"
Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.237382 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7a27e6c2-282d-4569-bee1-8c27c888a7ad-ovsdbserver-sb\") pod \"dnsmasq-dns-7b99d567b7-vvbts\" (UID: \"7a27e6c2-282d-4569-bee1-8c27c888a7ad\") " pod="openstack/dnsmasq-dns-7b99d567b7-vvbts"
Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.237447 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7a27e6c2-282d-4569-bee1-8c27c888a7ad-ovsdbserver-nb\") pod \"dnsmasq-dns-7b99d567b7-vvbts\" (UID: \"7a27e6c2-282d-4569-bee1-8c27c888a7ad\") " pod="openstack/dnsmasq-dns-7b99d567b7-vvbts"
Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.237467 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7a27e6c2-282d-4569-bee1-8c27c888a7ad-dns-svc\") pod \"dnsmasq-dns-7b99d567b7-vvbts\" (UID: \"7a27e6c2-282d-4569-bee1-8c27c888a7ad\") " pod="openstack/dnsmasq-dns-7b99d567b7-vvbts"
Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.237506 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a27e6c2-282d-4569-bee1-8c27c888a7ad-config\") pod \"dnsmasq-dns-7b99d567b7-vvbts\" (UID: \"7a27e6c2-282d-4569-bee1-8c27c888a7ad\") " pod="openstack/dnsmasq-dns-7b99d567b7-vvbts"
Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.339350 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7a27e6c2-282d-4569-bee1-8c27c888a7ad-ovsdbserver-nb\") pod \"dnsmasq-dns-7b99d567b7-vvbts\" (UID: \"7a27e6c2-282d-4569-bee1-8c27c888a7ad\") " pod="openstack/dnsmasq-dns-7b99d567b7-vvbts"
Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.339398 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7a27e6c2-282d-4569-bee1-8c27c888a7ad-dns-svc\") pod \"dnsmasq-dns-7b99d567b7-vvbts\" (UID: \"7a27e6c2-282d-4569-bee1-8c27c888a7ad\") " pod="openstack/dnsmasq-dns-7b99d567b7-vvbts"
Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.339445 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a27e6c2-282d-4569-bee1-8c27c888a7ad-config\") pod \"dnsmasq-dns-7b99d567b7-vvbts\" (UID: \"7a27e6c2-282d-4569-bee1-8c27c888a7ad\") " pod="openstack/dnsmasq-dns-7b99d567b7-vvbts"
Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.339553 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7a27e6c2-282d-4569-bee1-8c27c888a7ad-dns-swift-storage-0\") pod \"dnsmasq-dns-7b99d567b7-vvbts\" (UID: \"7a27e6c2-282d-4569-bee1-8c27c888a7ad\") " pod="openstack/dnsmasq-dns-7b99d567b7-vvbts"
Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.339605 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q47lp\" (UniqueName: \"kubernetes.io/projected/7a27e6c2-282d-4569-bee1-8c27c888a7ad-kube-api-access-q47lp\") pod \"dnsmasq-dns-7b99d567b7-vvbts\" (UID: \"7a27e6c2-282d-4569-bee1-8c27c888a7ad\") " pod="openstack/dnsmasq-dns-7b99d567b7-vvbts"
Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.339636 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7a27e6c2-282d-4569-bee1-8c27c888a7ad-ovsdbserver-sb\") pod \"dnsmasq-dns-7b99d567b7-vvbts\" (UID: \"7a27e6c2-282d-4569-bee1-8c27c888a7ad\") " pod="openstack/dnsmasq-dns-7b99d567b7-vvbts"
Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.341114 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7a27e6c2-282d-4569-bee1-8c27c888a7ad-ovsdbserver-nb\") pod \"dnsmasq-dns-7b99d567b7-vvbts\" (UID: \"7a27e6c2-282d-4569-bee1-8c27c888a7ad\") " pod="openstack/dnsmasq-dns-7b99d567b7-vvbts"
Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.341794 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7a27e6c2-282d-4569-bee1-8c27c888a7ad-dns-swift-storage-0\") pod \"dnsmasq-dns-7b99d567b7-vvbts\" (UID: \"7a27e6c2-282d-4569-bee1-8c27c888a7ad\") " pod="openstack/dnsmasq-dns-7b99d567b7-vvbts"
Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.341828 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a27e6c2-282d-4569-bee1-8c27c888a7ad-config\") pod \"dnsmasq-dns-7b99d567b7-vvbts\" (UID: \"7a27e6c2-282d-4569-bee1-8c27c888a7ad\") " pod="openstack/dnsmasq-dns-7b99d567b7-vvbts"
Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.342045 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7a27e6c2-282d-4569-bee1-8c27c888a7ad-ovsdbserver-sb\") pod \"dnsmasq-dns-7b99d567b7-vvbts\" (UID: \"7a27e6c2-282d-4569-bee1-8c27c888a7ad\") " pod="openstack/dnsmasq-dns-7b99d567b7-vvbts"
Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.342231 4784 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-5884d57c44-l8tbz" podUID="e78e5147-155e-4027-91ca-bf7e107f5b88" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.160:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.160:8443: connect: connection refused"
Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.343930 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7a27e6c2-282d-4569-bee1-8c27c888a7ad-dns-svc\") pod \"dnsmasq-dns-7b99d567b7-vvbts\" (UID: \"7a27e6c2-282d-4569-bee1-8c27c888a7ad\") " pod="openstack/dnsmasq-dns-7b99d567b7-vvbts"
Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.363007 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q47lp\" (UniqueName: \"kubernetes.io/projected/7a27e6c2-282d-4569-bee1-8c27c888a7ad-kube-api-access-q47lp\") pod \"dnsmasq-dns-7b99d567b7-vvbts\" (UID: \"7a27e6c2-282d-4569-bee1-8c27c888a7ad\") " pod="openstack/dnsmasq-dns-7b99d567b7-vvbts"
Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.449583 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7b99d567b7-vvbts"
Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.766893 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"3964e71e-ad0d-4f97-9458-bb1defbd3a47","Type":"ContainerDied","Data":"bda0177b249528eacb150937efea2f45580d5c2e3a8fd5f6a65977059b8ad092"}
Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.767302 4784 scope.go:117] "RemoveContainer" containerID="a2d57e911d363f308109c061480931a780e722a62a5199faa807f05c0602cd68"
Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.767114 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-api-0"
Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.769406 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.769442 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.814650 4784 scope.go:117] "RemoveContainer" containerID="2de19768eaa35e73bb81e6523b0c64a9bf3209f080fef2729cad20d498ea00d0"
Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.816815 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-api-0"]
Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.831827 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-api-0"]
Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.845319 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-api-0"]
Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.847534 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-api-0"
Need to start a new one" pod="openstack/watcher-api-0" Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.851439 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-watcher-public-svc" Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.851657 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-api-config-data" Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.852003 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-watcher-internal-svc" Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.869226 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-api-0"] Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.957206 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7b99d567b7-vvbts"] Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.965920 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91f043b4-34b6-413d-b8d2-25a247639c63-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"91f043b4-34b6-413d-b8d2-25a247639c63\") " pod="openstack/watcher-api-0" Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.965976 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/91f043b4-34b6-413d-b8d2-25a247639c63-public-tls-certs\") pod \"watcher-api-0\" (UID: \"91f043b4-34b6-413d-b8d2-25a247639c63\") " pod="openstack/watcher-api-0" Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.965997 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/91f043b4-34b6-413d-b8d2-25a247639c63-internal-tls-certs\") pod \"watcher-api-0\" (UID: \"91f043b4-34b6-413d-b8d2-25a247639c63\") " pod="openstack/watcher-api-0" Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.966032 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/91f043b4-34b6-413d-b8d2-25a247639c63-logs\") pod \"watcher-api-0\" (UID: \"91f043b4-34b6-413d-b8d2-25a247639c63\") " pod="openstack/watcher-api-0" Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.966096 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zsxp8\" (UniqueName: \"kubernetes.io/projected/91f043b4-34b6-413d-b8d2-25a247639c63-kube-api-access-zsxp8\") pod \"watcher-api-0\" (UID: \"91f043b4-34b6-413d-b8d2-25a247639c63\") " pod="openstack/watcher-api-0" Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.966130 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91f043b4-34b6-413d-b8d2-25a247639c63-config-data\") pod \"watcher-api-0\" (UID: \"91f043b4-34b6-413d-b8d2-25a247639c63\") " pod="openstack/watcher-api-0" Dec 05 12:46:29 crc kubenswrapper[4784]: I1205 12:46:29.966172 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/91f043b4-34b6-413d-b8d2-25a247639c63-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"91f043b4-34b6-413d-b8d2-25a247639c63\") " pod="openstack/watcher-api-0" Dec 05 12:46:30 crc 
kubenswrapper[4784]: I1205 12:46:30.059397 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-decision-engine-0" Dec 05 12:46:30 crc kubenswrapper[4784]: I1205 12:46:30.067511 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/91f043b4-34b6-413d-b8d2-25a247639c63-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"91f043b4-34b6-413d-b8d2-25a247639c63\") " pod="openstack/watcher-api-0" Dec 05 12:46:30 crc kubenswrapper[4784]: I1205 12:46:30.067561 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91f043b4-34b6-413d-b8d2-25a247639c63-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"91f043b4-34b6-413d-b8d2-25a247639c63\") " pod="openstack/watcher-api-0" Dec 05 12:46:30 crc kubenswrapper[4784]: I1205 12:46:30.067627 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/91f043b4-34b6-413d-b8d2-25a247639c63-public-tls-certs\") pod \"watcher-api-0\" (UID: \"91f043b4-34b6-413d-b8d2-25a247639c63\") " pod="openstack/watcher-api-0" Dec 05 12:46:30 crc kubenswrapper[4784]: I1205 12:46:30.067648 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/91f043b4-34b6-413d-b8d2-25a247639c63-internal-tls-certs\") pod \"watcher-api-0\" (UID: \"91f043b4-34b6-413d-b8d2-25a247639c63\") " pod="openstack/watcher-api-0" Dec 05 12:46:30 crc kubenswrapper[4784]: I1205 12:46:30.067683 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/91f043b4-34b6-413d-b8d2-25a247639c63-logs\") pod \"watcher-api-0\" (UID: \"91f043b4-34b6-413d-b8d2-25a247639c63\") " pod="openstack/watcher-api-0" Dec 05 12:46:30 crc kubenswrapper[4784]: I1205 12:46:30.067755 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zsxp8\" (UniqueName: \"kubernetes.io/projected/91f043b4-34b6-413d-b8d2-25a247639c63-kube-api-access-zsxp8\") pod \"watcher-api-0\" (UID: \"91f043b4-34b6-413d-b8d2-25a247639c63\") " pod="openstack/watcher-api-0" Dec 05 12:46:30 crc kubenswrapper[4784]: I1205 12:46:30.067798 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91f043b4-34b6-413d-b8d2-25a247639c63-config-data\") pod \"watcher-api-0\" (UID: \"91f043b4-34b6-413d-b8d2-25a247639c63\") " pod="openstack/watcher-api-0" Dec 05 12:46:30 crc kubenswrapper[4784]: I1205 12:46:30.068063 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/91f043b4-34b6-413d-b8d2-25a247639c63-logs\") pod \"watcher-api-0\" (UID: \"91f043b4-34b6-413d-b8d2-25a247639c63\") " pod="openstack/watcher-api-0" Dec 05 12:46:30 crc kubenswrapper[4784]: I1205 12:46:30.072136 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91f043b4-34b6-413d-b8d2-25a247639c63-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"91f043b4-34b6-413d-b8d2-25a247639c63\") " pod="openstack/watcher-api-0" Dec 05 12:46:30 crc kubenswrapper[4784]: I1205 12:46:30.073669 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/91f043b4-34b6-413d-b8d2-25a247639c63-public-tls-certs\") pod \"watcher-api-0\" (UID: \"91f043b4-34b6-413d-b8d2-25a247639c63\") " pod="openstack/watcher-api-0" Dec 05 12:46:30 crc kubenswrapper[4784]: I1205 12:46:30.073864 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91f043b4-34b6-413d-b8d2-25a247639c63-config-data\") pod \"watcher-api-0\" (UID: \"91f043b4-34b6-413d-b8d2-25a247639c63\") " pod="openstack/watcher-api-0" Dec 05 12:46:30 crc kubenswrapper[4784]: I1205 12:46:30.074474 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/91f043b4-34b6-413d-b8d2-25a247639c63-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"91f043b4-34b6-413d-b8d2-25a247639c63\") " pod="openstack/watcher-api-0" Dec 05 12:46:30 crc kubenswrapper[4784]: I1205 12:46:30.077830 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/91f043b4-34b6-413d-b8d2-25a247639c63-internal-tls-certs\") pod \"watcher-api-0\" (UID: \"91f043b4-34b6-413d-b8d2-25a247639c63\") " pod="openstack/watcher-api-0" Dec 05 12:46:30 crc kubenswrapper[4784]: I1205 12:46:30.090075 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zsxp8\" (UniqueName: \"kubernetes.io/projected/91f043b4-34b6-413d-b8d2-25a247639c63-kube-api-access-zsxp8\") pod \"watcher-api-0\" (UID: \"91f043b4-34b6-413d-b8d2-25a247639c63\") " pod="openstack/watcher-api-0" Dec 05 12:46:30 crc kubenswrapper[4784]: I1205 12:46:30.172653 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-api-0" Dec 05 12:46:30 crc kubenswrapper[4784]: I1205 12:46:30.627257 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-api-0"] Dec 05 12:46:30 crc kubenswrapper[4784]: W1205 12:46:30.628529 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod91f043b4_34b6_413d_b8d2_25a247639c63.slice/crio-d685554820639fc27e858398502a70cdad1163d096986b97c4e3da75c5c114b7 WatchSource:0}: Error finding container d685554820639fc27e858398502a70cdad1163d096986b97c4e3da75c5c114b7: Status 404 returned error can't find the container with id d685554820639fc27e858398502a70cdad1163d096986b97c4e3da75c5c114b7 Dec 05 12:46:30 crc kubenswrapper[4784]: I1205 12:46:30.732509 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 12:46:30 crc kubenswrapper[4784]: I1205 12:46:30.732556 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 12:46:30 crc kubenswrapper[4784]: I1205 12:46:30.784140 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 05 12:46:30 crc kubenswrapper[4784]: I1205 12:46:30.794588 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"91f043b4-34b6-413d-b8d2-25a247639c63","Type":"ContainerStarted","Data":"d685554820639fc27e858398502a70cdad1163d096986b97c4e3da75c5c114b7"} Dec 05 12:46:30 crc kubenswrapper[4784]: I1205 12:46:30.795227 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 05 12:46:30 crc kubenswrapper[4784]: I1205 
Dec 05 12:46:30 crc kubenswrapper[4784]: I1205 12:46:30.798245 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b99d567b7-vvbts" event={"ID":"7a27e6c2-282d-4569-bee1-8c27c888a7ad","Type":"ContainerDied","Data":"ade29fcc0c12ff3fa32471b57034251d7f1dc9018d5f17034d1180f7d2289e2b"}
Dec 05 12:46:30 crc kubenswrapper[4784]: I1205 12:46:30.798737 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b99d567b7-vvbts" event={"ID":"7a27e6c2-282d-4569-bee1-8c27c888a7ad","Type":"ContainerStarted","Data":"34e80123c5cdeec38476e563a023bbb785f2d8fe37659003ca87047e2bdf5c79"}
Dec 05 12:46:30 crc kubenswrapper[4784]: I1205 12:46:30.804106 4784 generic.go:334] "Generic (PLEG): container finished" podID="31a07479-cab0-4561-b49b-73b1c3dad744" containerID="7e8b7236dd5da54a2156d923a2ffbbba283ac5abea62eb59f87953d914bd80c1" exitCode=1
Dec 05 12:46:30 crc kubenswrapper[4784]: I1205 12:46:30.806104 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"31a07479-cab0-4561-b49b-73b1c3dad744","Type":"ContainerDied","Data":"7e8b7236dd5da54a2156d923a2ffbbba283ac5abea62eb59f87953d914bd80c1"}
Dec 05 12:46:30 crc kubenswrapper[4784]: I1205 12:46:30.806179 4784 scope.go:117] "RemoveContainer" containerID="9ae2fd9a85d67e8d89687efbdea1588a1417a78ed52f566976c127a094db0480"
Dec 05 12:46:30 crc kubenswrapper[4784]: I1205 12:46:30.807557 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Dec 05 12:46:30 crc kubenswrapper[4784]: I1205 12:46:30.808615 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Dec 05 12:46:30 crc kubenswrapper[4784]: I1205 12:46:30.969564 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-decision-engine-0"
Dec 05 12:46:31 crc kubenswrapper[4784]: I1205 12:46:31.012649 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3964e71e-ad0d-4f97-9458-bb1defbd3a47" path="/var/lib/kubelet/pods/3964e71e-ad0d-4f97-9458-bb1defbd3a47/volumes"
Dec 05 12:46:31 crc kubenswrapper[4784]: I1205 12:46:31.093602 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4xmx9\" (UniqueName: \"kubernetes.io/projected/31a07479-cab0-4561-b49b-73b1c3dad744-kube-api-access-4xmx9\") pod \"31a07479-cab0-4561-b49b-73b1c3dad744\" (UID: \"31a07479-cab0-4561-b49b-73b1c3dad744\") "
Dec 05 12:46:31 crc kubenswrapper[4784]: I1205 12:46:31.093676 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/31a07479-cab0-4561-b49b-73b1c3dad744-custom-prometheus-ca\") pod \"31a07479-cab0-4561-b49b-73b1c3dad744\" (UID: \"31a07479-cab0-4561-b49b-73b1c3dad744\") "
Dec 05 12:46:31 crc kubenswrapper[4784]: I1205 12:46:31.093801 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31a07479-cab0-4561-b49b-73b1c3dad744-config-data\") pod \"31a07479-cab0-4561-b49b-73b1c3dad744\" (UID: \"31a07479-cab0-4561-b49b-73b1c3dad744\") "
Dec 05 12:46:31 crc kubenswrapper[4784]: I1205 12:46:31.093834 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31a07479-cab0-4561-b49b-73b1c3dad744-combined-ca-bundle\") pod \"31a07479-cab0-4561-b49b-73b1c3dad744\" (UID: \"31a07479-cab0-4561-b49b-73b1c3dad744\") "
Dec 05 12:46:31 crc kubenswrapper[4784]: I1205 12:46:31.093916 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/31a07479-cab0-4561-b49b-73b1c3dad744-logs\") pod \"31a07479-cab0-4561-b49b-73b1c3dad744\" (UID: \"31a07479-cab0-4561-b49b-73b1c3dad744\") "
Dec 05 12:46:31 crc kubenswrapper[4784]: I1205 12:46:31.094418 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/31a07479-cab0-4561-b49b-73b1c3dad744-logs" (OuterVolumeSpecName: "logs") pod "31a07479-cab0-4561-b49b-73b1c3dad744" (UID: "31a07479-cab0-4561-b49b-73b1c3dad744"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 12:46:31 crc kubenswrapper[4784]: I1205 12:46:31.094515 4784 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/31a07479-cab0-4561-b49b-73b1c3dad744-logs\") on node \"crc\" DevicePath \"\""
Dec 05 12:46:31 crc kubenswrapper[4784]: I1205 12:46:31.099351 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31a07479-cab0-4561-b49b-73b1c3dad744-kube-api-access-4xmx9" (OuterVolumeSpecName: "kube-api-access-4xmx9") pod "31a07479-cab0-4561-b49b-73b1c3dad744" (UID: "31a07479-cab0-4561-b49b-73b1c3dad744"). InnerVolumeSpecName "kube-api-access-4xmx9". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:46:31 crc kubenswrapper[4784]: I1205 12:46:31.123033 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31a07479-cab0-4561-b49b-73b1c3dad744-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "31a07479-cab0-4561-b49b-73b1c3dad744" (UID: "31a07479-cab0-4561-b49b-73b1c3dad744"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:46:31 crc kubenswrapper[4784]: I1205 12:46:31.125338 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31a07479-cab0-4561-b49b-73b1c3dad744-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "31a07479-cab0-4561-b49b-73b1c3dad744" (UID: "31a07479-cab0-4561-b49b-73b1c3dad744"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:46:31 crc kubenswrapper[4784]: I1205 12:46:31.146942 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31a07479-cab0-4561-b49b-73b1c3dad744-config-data" (OuterVolumeSpecName: "config-data") pod "31a07479-cab0-4561-b49b-73b1c3dad744" (UID: "31a07479-cab0-4561-b49b-73b1c3dad744"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:46:31 crc kubenswrapper[4784]: I1205 12:46:31.196177 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31a07479-cab0-4561-b49b-73b1c3dad744-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:31 crc kubenswrapper[4784]: I1205 12:46:31.196230 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31a07479-cab0-4561-b49b-73b1c3dad744-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:31 crc kubenswrapper[4784]: I1205 12:46:31.196243 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4xmx9\" (UniqueName: \"kubernetes.io/projected/31a07479-cab0-4561-b49b-73b1c3dad744-kube-api-access-4xmx9\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:31 crc kubenswrapper[4784]: I1205 12:46:31.196251 4784 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/31a07479-cab0-4561-b49b-73b1c3dad744-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:31 crc kubenswrapper[4784]: I1205 12:46:31.503479 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-7cdb6b7d4-mvtql" Dec 05 12:46:31 crc kubenswrapper[4784]: I1205 12:46:31.853154 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"91f043b4-34b6-413d-b8d2-25a247639c63","Type":"ContainerStarted","Data":"d45d05531af529451c0813db3b27c89ec5b1104f8cb1e5710d15d0289b68cabb"} Dec 05 12:46:31 crc kubenswrapper[4784]: I1205 12:46:31.853220 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"91f043b4-34b6-413d-b8d2-25a247639c63","Type":"ContainerStarted","Data":"48bc572fe605f4d2fb82bccda23a2aaf211d2302e12790e8adf10f7316d58c15"} Dec 05 12:46:31 crc kubenswrapper[4784]: I1205 12:46:31.853452 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0" Dec 05 12:46:31 crc kubenswrapper[4784]: I1205 12:46:31.856715 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-7b99d567b7-vvbts" event={"ID":"7a27e6c2-282d-4569-bee1-8c27c888a7ad","Type":"ContainerStarted","Data":"9e0ce67af1461527dd0e21fbb01c83c0f949d4c2802082ec8978873772e737d4"} Dec 05 12:46:31 crc kubenswrapper[4784]: I1205 12:46:31.856963 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7b99d567b7-vvbts" Dec 05 12:46:31 crc kubenswrapper[4784]: I1205 12:46:31.868265 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-decision-engine-0" Dec 05 12:46:31 crc kubenswrapper[4784]: I1205 12:46:31.868415 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"31a07479-cab0-4561-b49b-73b1c3dad744","Type":"ContainerDied","Data":"5c063822994f76aa4464f8ed523aaa42a21641ddf7dd8caa41a05a6b9fdd2bff"} Dec 05 12:46:31 crc kubenswrapper[4784]: I1205 12:46:31.868453 4784 scope.go:117] "RemoveContainer" containerID="7e8b7236dd5da54a2156d923a2ffbbba283ac5abea62eb59f87953d914bd80c1" Dec 05 12:46:31 crc kubenswrapper[4784]: I1205 12:46:31.877793 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-api-0" podStartSLOduration=2.877770069 podStartE2EDuration="2.877770069s" podCreationTimestamp="2025-12-05 12:46:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:46:31.877513191 +0000 UTC m=+1271.297580016" watchObservedRunningTime="2025-12-05 12:46:31.877770069 +0000 UTC m=+1271.297836924" Dec 05 12:46:31 crc kubenswrapper[4784]: I1205 12:46:31.927749 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7b99d567b7-vvbts" podStartSLOduration=2.927727707 podStartE2EDuration="2.927727707s" podCreationTimestamp="2025-12-05 12:46:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:46:31.924982642 +0000 UTC m=+1271.345049477" watchObservedRunningTime="2025-12-05 12:46:31.927727707 +0000 UTC m=+1271.347794522" Dec 05 12:46:31 crc kubenswrapper[4784]: I1205 12:46:31.959719 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-decision-engine-0"] Dec 05 12:46:31 crc kubenswrapper[4784]: I1205 12:46:31.974375 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-decision-engine-0"] Dec 05 12:46:31 crc kubenswrapper[4784]: I1205 12:46:31.983336 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-decision-engine-0"] Dec 05 12:46:31 crc kubenswrapper[4784]: E1205 12:46:31.983804 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31a07479-cab0-4561-b49b-73b1c3dad744" containerName="watcher-decision-engine" Dec 05 12:46:31 crc kubenswrapper[4784]: I1205 12:46:31.983820 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="31a07479-cab0-4561-b49b-73b1c3dad744" containerName="watcher-decision-engine" Dec 05 12:46:31 crc kubenswrapper[4784]: I1205 12:46:31.984033 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="31a07479-cab0-4561-b49b-73b1c3dad744" containerName="watcher-decision-engine" Dec 05 12:46:31 crc kubenswrapper[4784]: I1205 12:46:31.984050 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="31a07479-cab0-4561-b49b-73b1c3dad744" containerName="watcher-decision-engine" Dec 05 12:46:31 crc kubenswrapper[4784]: I1205 12:46:31.986813 
4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-decision-engine-0" Dec 05 12:46:31 crc kubenswrapper[4784]: I1205 12:46:31.990135 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-decision-engine-config-data" Dec 05 12:46:31 crc kubenswrapper[4784]: I1205 12:46:31.991868 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-decision-engine-0"] Dec 05 12:46:32 crc kubenswrapper[4784]: I1205 12:46:32.124178 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9ce21c3e-07a5-4404-827e-367acaba9d66-logs\") pod \"watcher-decision-engine-0\" (UID: \"9ce21c3e-07a5-4404-827e-367acaba9d66\") " pod="openstack/watcher-decision-engine-0" Dec 05 12:46:32 crc kubenswrapper[4784]: I1205 12:46:32.124361 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ce21c3e-07a5-4404-827e-367acaba9d66-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"9ce21c3e-07a5-4404-827e-367acaba9d66\") " pod="openstack/watcher-decision-engine-0" Dec 05 12:46:32 crc kubenswrapper[4784]: I1205 12:46:32.124450 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ce21c3e-07a5-4404-827e-367acaba9d66-config-data\") pod \"watcher-decision-engine-0\" (UID: \"9ce21c3e-07a5-4404-827e-367acaba9d66\") " pod="openstack/watcher-decision-engine-0" Dec 05 12:46:32 crc kubenswrapper[4784]: I1205 12:46:32.124558 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75t69\" (UniqueName: \"kubernetes.io/projected/9ce21c3e-07a5-4404-827e-367acaba9d66-kube-api-access-75t69\") pod \"watcher-decision-engine-0\" (UID: \"9ce21c3e-07a5-4404-827e-367acaba9d66\") " pod="openstack/watcher-decision-engine-0" Dec 05 12:46:32 crc kubenswrapper[4784]: I1205 12:46:32.124613 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/9ce21c3e-07a5-4404-827e-367acaba9d66-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"9ce21c3e-07a5-4404-827e-367acaba9d66\") " pod="openstack/watcher-decision-engine-0" Dec 05 12:46:32 crc kubenswrapper[4784]: I1205 12:46:32.227875 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ce21c3e-07a5-4404-827e-367acaba9d66-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"9ce21c3e-07a5-4404-827e-367acaba9d66\") " pod="openstack/watcher-decision-engine-0" Dec 05 12:46:32 crc kubenswrapper[4784]: I1205 12:46:32.228034 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ce21c3e-07a5-4404-827e-367acaba9d66-config-data\") pod \"watcher-decision-engine-0\" (UID: \"9ce21c3e-07a5-4404-827e-367acaba9d66\") " pod="openstack/watcher-decision-engine-0" Dec 05 12:46:32 crc kubenswrapper[4784]: I1205 12:46:32.228128 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75t69\" (UniqueName: \"kubernetes.io/projected/9ce21c3e-07a5-4404-827e-367acaba9d66-kube-api-access-75t69\") pod \"watcher-decision-engine-0\" (UID: 
\"9ce21c3e-07a5-4404-827e-367acaba9d66\") " pod="openstack/watcher-decision-engine-0" Dec 05 12:46:32 crc kubenswrapper[4784]: I1205 12:46:32.228470 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/9ce21c3e-07a5-4404-827e-367acaba9d66-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"9ce21c3e-07a5-4404-827e-367acaba9d66\") " pod="openstack/watcher-decision-engine-0" Dec 05 12:46:32 crc kubenswrapper[4784]: I1205 12:46:32.228738 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9ce21c3e-07a5-4404-827e-367acaba9d66-logs\") pod \"watcher-decision-engine-0\" (UID: \"9ce21c3e-07a5-4404-827e-367acaba9d66\") " pod="openstack/watcher-decision-engine-0" Dec 05 12:46:32 crc kubenswrapper[4784]: I1205 12:46:32.229355 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9ce21c3e-07a5-4404-827e-367acaba9d66-logs\") pod \"watcher-decision-engine-0\" (UID: \"9ce21c3e-07a5-4404-827e-367acaba9d66\") " pod="openstack/watcher-decision-engine-0" Dec 05 12:46:32 crc kubenswrapper[4784]: I1205 12:46:32.233909 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ce21c3e-07a5-4404-827e-367acaba9d66-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"9ce21c3e-07a5-4404-827e-367acaba9d66\") " pod="openstack/watcher-decision-engine-0" Dec 05 12:46:32 crc kubenswrapper[4784]: I1205 12:46:32.237006 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/9ce21c3e-07a5-4404-827e-367acaba9d66-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"9ce21c3e-07a5-4404-827e-367acaba9d66\") " pod="openstack/watcher-decision-engine-0" Dec 05 12:46:32 crc kubenswrapper[4784]: I1205 12:46:32.238630 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ce21c3e-07a5-4404-827e-367acaba9d66-config-data\") pod \"watcher-decision-engine-0\" (UID: \"9ce21c3e-07a5-4404-827e-367acaba9d66\") " pod="openstack/watcher-decision-engine-0" Dec 05 12:46:32 crc kubenswrapper[4784]: I1205 12:46:32.253933 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-75t69\" (UniqueName: \"kubernetes.io/projected/9ce21c3e-07a5-4404-827e-367acaba9d66-kube-api-access-75t69\") pod \"watcher-decision-engine-0\" (UID: \"9ce21c3e-07a5-4404-827e-367acaba9d66\") " pod="openstack/watcher-decision-engine-0" Dec 05 12:46:32 crc kubenswrapper[4784]: I1205 12:46:32.310925 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-decision-engine-0" Dec 05 12:46:32 crc kubenswrapper[4784]: I1205 12:46:32.879472 4784 generic.go:334] "Generic (PLEG): container finished" podID="c921ceb1-e577-4b4a-be99-3544491930d3" containerID="1c164b5e8d03ab5c187ce9704d36cd74b5e9701d1919c7d4f77c0c11c3625fb3" exitCode=0 Dec 05 12:46:32 crc kubenswrapper[4784]: I1205 12:46:32.879515 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-bwtqt" event={"ID":"c921ceb1-e577-4b4a-be99-3544491930d3","Type":"ContainerDied","Data":"1c164b5e8d03ab5c187ce9704d36cd74b5e9701d1919c7d4f77c0c11c3625fb3"} Dec 05 12:46:33 crc kubenswrapper[4784]: I1205 12:46:33.014162 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31a07479-cab0-4561-b49b-73b1c3dad744" path="/var/lib/kubelet/pods/31a07479-cab0-4561-b49b-73b1c3dad744/volumes" Dec 05 12:46:33 crc kubenswrapper[4784]: I1205 12:46:33.453642 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-7cdb6b7d4-mvtql" Dec 05 12:46:33 crc kubenswrapper[4784]: I1205 12:46:33.521699 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5884d57c44-l8tbz"] Dec 05 12:46:33 crc kubenswrapper[4784]: I1205 12:46:33.521940 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-5884d57c44-l8tbz" podUID="e78e5147-155e-4027-91ca-bf7e107f5b88" containerName="horizon-log" containerID="cri-o://427abff3eac7473e838fa85f2097041270a6b80a6e6f065dfda211890bb77d19" gracePeriod=30 Dec 05 12:46:33 crc kubenswrapper[4784]: I1205 12:46:33.522064 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-5884d57c44-l8tbz" podUID="e78e5147-155e-4027-91ca-bf7e107f5b88" containerName="horizon" containerID="cri-o://773546a469ba89b933adcb75ad82aff9b851eb2d42eb822c7c78e5fe1ae30e3e" gracePeriod=30 Dec 05 12:46:33 crc kubenswrapper[4784]: I1205 12:46:33.895131 4784 generic.go:334] "Generic (PLEG): container finished" podID="e78e5147-155e-4027-91ca-bf7e107f5b88" containerID="773546a469ba89b933adcb75ad82aff9b851eb2d42eb822c7c78e5fe1ae30e3e" exitCode=0 Dec 05 12:46:33 crc kubenswrapper[4784]: I1205 12:46:33.895318 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5884d57c44-l8tbz" event={"ID":"e78e5147-155e-4027-91ca-bf7e107f5b88","Type":"ContainerDied","Data":"773546a469ba89b933adcb75ad82aff9b851eb2d42eb822c7c78e5fe1ae30e3e"} Dec 05 12:46:34 crc kubenswrapper[4784]: I1205 12:46:34.539279 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-api-0" Dec 05 12:46:34 crc kubenswrapper[4784]: I1205 12:46:34.988832 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 12:46:35 crc kubenswrapper[4784]: I1205 12:46:35.173512 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0" Dec 05 12:46:35 crc kubenswrapper[4784]: I1205 12:46:35.509964 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-bwtqt" Dec 05 12:46:35 crc kubenswrapper[4784]: I1205 12:46:35.697383 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c921ceb1-e577-4b4a-be99-3544491930d3-db-sync-config-data\") pod \"c921ceb1-e577-4b4a-be99-3544491930d3\" (UID: \"c921ceb1-e577-4b4a-be99-3544491930d3\") " Dec 05 12:46:35 crc kubenswrapper[4784]: I1205 12:46:35.697471 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x9h8j\" (UniqueName: \"kubernetes.io/projected/c921ceb1-e577-4b4a-be99-3544491930d3-kube-api-access-x9h8j\") pod \"c921ceb1-e577-4b4a-be99-3544491930d3\" (UID: \"c921ceb1-e577-4b4a-be99-3544491930d3\") " Dec 05 12:46:35 crc kubenswrapper[4784]: I1205 12:46:35.697652 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c921ceb1-e577-4b4a-be99-3544491930d3-combined-ca-bundle\") pod \"c921ceb1-e577-4b4a-be99-3544491930d3\" (UID: \"c921ceb1-e577-4b4a-be99-3544491930d3\") " Dec 05 12:46:35 crc kubenswrapper[4784]: I1205 12:46:35.702762 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c921ceb1-e577-4b4a-be99-3544491930d3-kube-api-access-x9h8j" (OuterVolumeSpecName: "kube-api-access-x9h8j") pod "c921ceb1-e577-4b4a-be99-3544491930d3" (UID: "c921ceb1-e577-4b4a-be99-3544491930d3"). InnerVolumeSpecName "kube-api-access-x9h8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:46:35 crc kubenswrapper[4784]: I1205 12:46:35.725540 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c921ceb1-e577-4b4a-be99-3544491930d3-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "c921ceb1-e577-4b4a-be99-3544491930d3" (UID: "c921ceb1-e577-4b4a-be99-3544491930d3"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:46:35 crc kubenswrapper[4784]: I1205 12:46:35.750092 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c921ceb1-e577-4b4a-be99-3544491930d3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c921ceb1-e577-4b4a-be99-3544491930d3" (UID: "c921ceb1-e577-4b4a-be99-3544491930d3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:46:35 crc kubenswrapper[4784]: I1205 12:46:35.799461 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c921ceb1-e577-4b4a-be99-3544491930d3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:35 crc kubenswrapper[4784]: I1205 12:46:35.799489 4784 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c921ceb1-e577-4b4a-be99-3544491930d3-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:35 crc kubenswrapper[4784]: I1205 12:46:35.799498 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x9h8j\" (UniqueName: \"kubernetes.io/projected/c921ceb1-e577-4b4a-be99-3544491930d3-kube-api-access-x9h8j\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:35 crc kubenswrapper[4784]: I1205 12:46:35.916774 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-bwtqt" Dec 05 12:46:35 crc kubenswrapper[4784]: I1205 12:46:35.916772 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-bwtqt" event={"ID":"c921ceb1-e577-4b4a-be99-3544491930d3","Type":"ContainerDied","Data":"6931e4ec7e72f947059231343196cf10337a317defddc6d14c4e6272f74e04c6"} Dec 05 12:46:35 crc kubenswrapper[4784]: I1205 12:46:35.916894 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6931e4ec7e72f947059231343196cf10337a317defddc6d14c4e6272f74e04c6" Dec 05 12:46:35 crc kubenswrapper[4784]: I1205 12:46:35.921289 4784 generic.go:334] "Generic (PLEG): container finished" podID="e187592b-b331-4144-9a27-ba81e79121b6" containerID="fe7d87073b246c6fa9e935b02e2358264abb29fe814925789307c5ba3835fd3f" exitCode=0 Dec 05 12:46:35 crc kubenswrapper[4784]: I1205 12:46:35.921331 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-24k4j" event={"ID":"e187592b-b331-4144-9a27-ba81e79121b6","Type":"ContainerDied","Data":"fe7d87073b246c6fa9e935b02e2358264abb29fe814925789307c5ba3835fd3f"} Dec 05 12:46:36 crc kubenswrapper[4784]: I1205 12:46:36.309009 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-decision-engine-0"] Dec 05 12:46:36 crc kubenswrapper[4784]: W1205 12:46:36.317210 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9ce21c3e_07a5_4404_827e_367acaba9d66.slice/crio-17803a9f081def484ae402373f4e55e1b97e5c57dfdc49d839896b38b0fec8cc WatchSource:0}: Error finding container 17803a9f081def484ae402373f4e55e1b97e5c57dfdc49d839896b38b0fec8cc: Status 404 returned error can't find the container with id 17803a9f081def484ae402373f4e55e1b97e5c57dfdc49d839896b38b0fec8cc Dec 05 12:46:36 crc kubenswrapper[4784]: I1205 12:46:36.774600 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-5866d77f58-dcc8l"] Dec 05 12:46:36 crc kubenswrapper[4784]: E1205 12:46:36.775009 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c921ceb1-e577-4b4a-be99-3544491930d3" containerName="barbican-db-sync" Dec 05 12:46:36 crc kubenswrapper[4784]: I1205 12:46:36.775025 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="c921ceb1-e577-4b4a-be99-3544491930d3" containerName="barbican-db-sync" Dec 05 12:46:36 crc kubenswrapper[4784]: E1205 12:46:36.775043 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31a07479-cab0-4561-b49b-73b1c3dad744" containerName="watcher-decision-engine" Dec 05 12:46:36 crc kubenswrapper[4784]: I1205 12:46:36.775051 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="31a07479-cab0-4561-b49b-73b1c3dad744" containerName="watcher-decision-engine" Dec 05 12:46:36 crc kubenswrapper[4784]: I1205 12:46:36.775252 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="c921ceb1-e577-4b4a-be99-3544491930d3" containerName="barbican-db-sync" Dec 05 12:46:36 crc kubenswrapper[4784]: I1205 12:46:36.776293 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-5866d77f58-dcc8l" Dec 05 12:46:36 crc kubenswrapper[4784]: I1205 12:46:36.778037 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-nndgt" Dec 05 12:46:36 crc kubenswrapper[4784]: I1205 12:46:36.779250 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Dec 05 12:46:36 crc kubenswrapper[4784]: I1205 12:46:36.779532 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 05 12:46:36 crc kubenswrapper[4784]: I1205 12:46:36.827093 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-5866d77f58-dcc8l"] Dec 05 12:46:36 crc kubenswrapper[4784]: I1205 12:46:36.877251 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-655f48fc8f-k55s9"] Dec 05 12:46:36 crc kubenswrapper[4784]: I1205 12:46:36.878798 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-655f48fc8f-k55s9" Dec 05 12:46:36 crc kubenswrapper[4784]: I1205 12:46:36.883006 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Dec 05 12:46:36 crc kubenswrapper[4784]: I1205 12:46:36.893862 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-655f48fc8f-k55s9"] Dec 05 12:46:36 crc kubenswrapper[4784]: I1205 12:46:36.926411 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7fd6fd1-2f61-44a4-b8b7-9f1c38768db2-config-data\") pod \"barbican-worker-655f48fc8f-k55s9\" (UID: \"b7fd6fd1-2f61-44a4-b8b7-9f1c38768db2\") " pod="openstack/barbican-worker-655f48fc8f-k55s9" Dec 05 12:46:36 crc kubenswrapper[4784]: I1205 12:46:36.926482 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d1f830fd-3c91-4985-ac6f-96314a74acc1-config-data-custom\") pod \"barbican-keystone-listener-5866d77f58-dcc8l\" (UID: \"d1f830fd-3c91-4985-ac6f-96314a74acc1\") " pod="openstack/barbican-keystone-listener-5866d77f58-dcc8l" Dec 05 12:46:36 crc kubenswrapper[4784]: I1205 12:46:36.926537 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hsttl\" (UniqueName: \"kubernetes.io/projected/b7fd6fd1-2f61-44a4-b8b7-9f1c38768db2-kube-api-access-hsttl\") pod \"barbican-worker-655f48fc8f-k55s9\" (UID: \"b7fd6fd1-2f61-44a4-b8b7-9f1c38768db2\") " pod="openstack/barbican-worker-655f48fc8f-k55s9" Dec 05 12:46:36 crc kubenswrapper[4784]: I1205 12:46:36.926576 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d1f830fd-3c91-4985-ac6f-96314a74acc1-logs\") pod \"barbican-keystone-listener-5866d77f58-dcc8l\" (UID: \"d1f830fd-3c91-4985-ac6f-96314a74acc1\") " pod="openstack/barbican-keystone-listener-5866d77f58-dcc8l" Dec 05 12:46:36 crc kubenswrapper[4784]: I1205 12:46:36.926615 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1f830fd-3c91-4985-ac6f-96314a74acc1-combined-ca-bundle\") pod \"barbican-keystone-listener-5866d77f58-dcc8l\" (UID: 
\"d1f830fd-3c91-4985-ac6f-96314a74acc1\") " pod="openstack/barbican-keystone-listener-5866d77f58-dcc8l" Dec 05 12:46:36 crc kubenswrapper[4784]: I1205 12:46:36.926647 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rwdc\" (UniqueName: \"kubernetes.io/projected/d1f830fd-3c91-4985-ac6f-96314a74acc1-kube-api-access-4rwdc\") pod \"barbican-keystone-listener-5866d77f58-dcc8l\" (UID: \"d1f830fd-3c91-4985-ac6f-96314a74acc1\") " pod="openstack/barbican-keystone-listener-5866d77f58-dcc8l" Dec 05 12:46:36 crc kubenswrapper[4784]: I1205 12:46:36.926674 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b7fd6fd1-2f61-44a4-b8b7-9f1c38768db2-config-data-custom\") pod \"barbican-worker-655f48fc8f-k55s9\" (UID: \"b7fd6fd1-2f61-44a4-b8b7-9f1c38768db2\") " pod="openstack/barbican-worker-655f48fc8f-k55s9" Dec 05 12:46:36 crc kubenswrapper[4784]: I1205 12:46:36.926715 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b7fd6fd1-2f61-44a4-b8b7-9f1c38768db2-logs\") pod \"barbican-worker-655f48fc8f-k55s9\" (UID: \"b7fd6fd1-2f61-44a4-b8b7-9f1c38768db2\") " pod="openstack/barbican-worker-655f48fc8f-k55s9" Dec 05 12:46:36 crc kubenswrapper[4784]: I1205 12:46:36.926776 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1f830fd-3c91-4985-ac6f-96314a74acc1-config-data\") pod \"barbican-keystone-listener-5866d77f58-dcc8l\" (UID: \"d1f830fd-3c91-4985-ac6f-96314a74acc1\") " pod="openstack/barbican-keystone-listener-5866d77f58-dcc8l" Dec 05 12:46:36 crc kubenswrapper[4784]: I1205 12:46:36.926802 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7fd6fd1-2f61-44a4-b8b7-9f1c38768db2-combined-ca-bundle\") pod \"barbican-worker-655f48fc8f-k55s9\" (UID: \"b7fd6fd1-2f61-44a4-b8b7-9f1c38768db2\") " pod="openstack/barbican-worker-655f48fc8f-k55s9" Dec 05 12:46:36 crc kubenswrapper[4784]: I1205 12:46:36.952304 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7b99d567b7-vvbts"] Dec 05 12:46:36 crc kubenswrapper[4784]: I1205 12:46:36.952604 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7b99d567b7-vvbts" podUID="7a27e6c2-282d-4569-bee1-8c27c888a7ad" containerName="dnsmasq-dns" containerID="cri-o://9e0ce67af1461527dd0e21fbb01c83c0f949d4c2802082ec8978873772e737d4" gracePeriod=10 Dec 05 12:46:36 crc kubenswrapper[4784]: I1205 12:46:36.954306 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7b99d567b7-vvbts" Dec 05 12:46:36 crc kubenswrapper[4784]: I1205 12:46:36.991287 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ee259e74-24f6-4a39-b3d9-3bd926ace782","Type":"ContainerStarted","Data":"fa336b4018742e1bd6bc52ac57d9ac584c50caecceeb36ac430b9f2ff9c7b0d6"} Dec 05 12:46:36 crc kubenswrapper[4784]: I1205 12:46:36.992622 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ee259e74-24f6-4a39-b3d9-3bd926ace782" containerName="ceilometer-central-agent" 
containerID="cri-o://8abd08789218c1d119999de5e917fb05499dc4cd916135ba40d89c9a7ff777ab" gracePeriod=30 Dec 05 12:46:36 crc kubenswrapper[4784]: I1205 12:46:36.993434 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ee259e74-24f6-4a39-b3d9-3bd926ace782" containerName="ceilometer-notification-agent" containerID="cri-o://a82f9006a602c19583e2d55d81c80a786b050b17e4b7ac411347b1ecefe31e74" gracePeriod=30 Dec 05 12:46:36 crc kubenswrapper[4784]: I1205 12:46:36.993420 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ee259e74-24f6-4a39-b3d9-3bd926ace782" containerName="proxy-httpd" containerID="cri-o://fa336b4018742e1bd6bc52ac57d9ac584c50caecceeb36ac430b9f2ff9c7b0d6" gracePeriod=30 Dec 05 12:46:36 crc kubenswrapper[4784]: I1205 12:46:36.993474 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ee259e74-24f6-4a39-b3d9-3bd926ace782" containerName="sg-core" containerID="cri-o://472ef199bddfcf498f98d8428f6cdd40f6115c50a8e1b3c362aff43783290366" gracePeriod=30 Dec 05 12:46:36 crc kubenswrapper[4784]: I1205 12:46:36.993253 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.048714 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"9ce21c3e-07a5-4404-827e-367acaba9d66","Type":"ContainerStarted","Data":"21cd6441ce99a6a9c28798369a643ebcea4cfc755457448e96eeb1474d186087"} Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.049014 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"9ce21c3e-07a5-4404-827e-367acaba9d66","Type":"ContainerStarted","Data":"17803a9f081def484ae402373f4e55e1b97e5c57dfdc49d839896b38b0fec8cc"} Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.051979 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b7fd6fd1-2f61-44a4-b8b7-9f1c38768db2-logs\") pod \"barbican-worker-655f48fc8f-k55s9\" (UID: \"b7fd6fd1-2f61-44a4-b8b7-9f1c38768db2\") " pod="openstack/barbican-worker-655f48fc8f-k55s9" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.052424 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b7fd6fd1-2f61-44a4-b8b7-9f1c38768db2-logs\") pod \"barbican-worker-655f48fc8f-k55s9\" (UID: \"b7fd6fd1-2f61-44a4-b8b7-9f1c38768db2\") " pod="openstack/barbican-worker-655f48fc8f-k55s9" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.053036 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1f830fd-3c91-4985-ac6f-96314a74acc1-config-data\") pod \"barbican-keystone-listener-5866d77f58-dcc8l\" (UID: \"d1f830fd-3c91-4985-ac6f-96314a74acc1\") " pod="openstack/barbican-keystone-listener-5866d77f58-dcc8l" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.053063 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7fd6fd1-2f61-44a4-b8b7-9f1c38768db2-combined-ca-bundle\") pod \"barbican-worker-655f48fc8f-k55s9\" (UID: \"b7fd6fd1-2f61-44a4-b8b7-9f1c38768db2\") " pod="openstack/barbican-worker-655f48fc8f-k55s9" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.053150 
4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7fd6fd1-2f61-44a4-b8b7-9f1c38768db2-config-data\") pod \"barbican-worker-655f48fc8f-k55s9\" (UID: \"b7fd6fd1-2f61-44a4-b8b7-9f1c38768db2\") " pod="openstack/barbican-worker-655f48fc8f-k55s9" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.053212 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d1f830fd-3c91-4985-ac6f-96314a74acc1-config-data-custom\") pod \"barbican-keystone-listener-5866d77f58-dcc8l\" (UID: \"d1f830fd-3c91-4985-ac6f-96314a74acc1\") " pod="openstack/barbican-keystone-listener-5866d77f58-dcc8l" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.053270 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hsttl\" (UniqueName: \"kubernetes.io/projected/b7fd6fd1-2f61-44a4-b8b7-9f1c38768db2-kube-api-access-hsttl\") pod \"barbican-worker-655f48fc8f-k55s9\" (UID: \"b7fd6fd1-2f61-44a4-b8b7-9f1c38768db2\") " pod="openstack/barbican-worker-655f48fc8f-k55s9" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.053313 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d1f830fd-3c91-4985-ac6f-96314a74acc1-logs\") pod \"barbican-keystone-listener-5866d77f58-dcc8l\" (UID: \"d1f830fd-3c91-4985-ac6f-96314a74acc1\") " pod="openstack/barbican-keystone-listener-5866d77f58-dcc8l" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.053351 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1f830fd-3c91-4985-ac6f-96314a74acc1-combined-ca-bundle\") pod \"barbican-keystone-listener-5866d77f58-dcc8l\" (UID: \"d1f830fd-3c91-4985-ac6f-96314a74acc1\") " pod="openstack/barbican-keystone-listener-5866d77f58-dcc8l" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.053417 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rwdc\" (UniqueName: \"kubernetes.io/projected/d1f830fd-3c91-4985-ac6f-96314a74acc1-kube-api-access-4rwdc\") pod \"barbican-keystone-listener-5866d77f58-dcc8l\" (UID: \"d1f830fd-3c91-4985-ac6f-96314a74acc1\") " pod="openstack/barbican-keystone-listener-5866d77f58-dcc8l" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.053435 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b7fd6fd1-2f61-44a4-b8b7-9f1c38768db2-config-data-custom\") pod \"barbican-worker-655f48fc8f-k55s9\" (UID: \"b7fd6fd1-2f61-44a4-b8b7-9f1c38768db2\") " pod="openstack/barbican-worker-655f48fc8f-k55s9" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.055226 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d1f830fd-3c91-4985-ac6f-96314a74acc1-logs\") pod \"barbican-keystone-listener-5866d77f58-dcc8l\" (UID: \"d1f830fd-3c91-4985-ac6f-96314a74acc1\") " pod="openstack/barbican-keystone-listener-5866d77f58-dcc8l" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.063634 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6c4b958bbc-vf7wn"] Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.064974 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/d1f830fd-3c91-4985-ac6f-96314a74acc1-config-data\") pod \"barbican-keystone-listener-5866d77f58-dcc8l\" (UID: \"d1f830fd-3c91-4985-ac6f-96314a74acc1\") " pod="openstack/barbican-keystone-listener-5866d77f58-dcc8l" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.066494 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c4b958bbc-vf7wn" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.068774 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7fd6fd1-2f61-44a4-b8b7-9f1c38768db2-combined-ca-bundle\") pod \"barbican-worker-655f48fc8f-k55s9\" (UID: \"b7fd6fd1-2f61-44a4-b8b7-9f1c38768db2\") " pod="openstack/barbican-worker-655f48fc8f-k55s9" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.069315 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b7fd6fd1-2f61-44a4-b8b7-9f1c38768db2-config-data-custom\") pod \"barbican-worker-655f48fc8f-k55s9\" (UID: \"b7fd6fd1-2f61-44a4-b8b7-9f1c38768db2\") " pod="openstack/barbican-worker-655f48fc8f-k55s9" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.076849 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d1f830fd-3c91-4985-ac6f-96314a74acc1-config-data-custom\") pod \"barbican-keystone-listener-5866d77f58-dcc8l\" (UID: \"d1f830fd-3c91-4985-ac6f-96314a74acc1\") " pod="openstack/barbican-keystone-listener-5866d77f58-dcc8l" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.078606 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7fd6fd1-2f61-44a4-b8b7-9f1c38768db2-config-data\") pod \"barbican-worker-655f48fc8f-k55s9\" (UID: \"b7fd6fd1-2f61-44a4-b8b7-9f1c38768db2\") " pod="openstack/barbican-worker-655f48fc8f-k55s9" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.098458 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1f830fd-3c91-4985-ac6f-96314a74acc1-combined-ca-bundle\") pod \"barbican-keystone-listener-5866d77f58-dcc8l\" (UID: \"d1f830fd-3c91-4985-ac6f-96314a74acc1\") " pod="openstack/barbican-keystone-listener-5866d77f58-dcc8l" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.106968 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hsttl\" (UniqueName: \"kubernetes.io/projected/b7fd6fd1-2f61-44a4-b8b7-9f1c38768db2-kube-api-access-hsttl\") pod \"barbican-worker-655f48fc8f-k55s9\" (UID: \"b7fd6fd1-2f61-44a4-b8b7-9f1c38768db2\") " pod="openstack/barbican-worker-655f48fc8f-k55s9" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.159598 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rwdc\" (UniqueName: \"kubernetes.io/projected/d1f830fd-3c91-4985-ac6f-96314a74acc1-kube-api-access-4rwdc\") pod \"barbican-keystone-listener-5866d77f58-dcc8l\" (UID: \"d1f830fd-3c91-4985-ac6f-96314a74acc1\") " pod="openstack/barbican-keystone-listener-5866d77f58-dcc8l" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.203285 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c4b958bbc-vf7wn"] Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.204890 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/ceilometer-0" podStartSLOduration=5.465150082 podStartE2EDuration="1m8.204869853s" podCreationTimestamp="2025-12-05 12:45:29 +0000 UTC" firstStartedPulling="2025-12-05 12:45:33.257705274 +0000 UTC m=+1212.677772089" lastFinishedPulling="2025-12-05 12:46:35.997425045 +0000 UTC m=+1275.417491860" observedRunningTime="2025-12-05 12:46:37.098604759 +0000 UTC m=+1276.518671574" watchObservedRunningTime="2025-12-05 12:46:37.204869853 +0000 UTC m=+1276.624936668" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.222105 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-655f48fc8f-k55s9" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.258816 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/458ba694-df9e-475f-bfa7-8d9acd200e8c-config\") pod \"dnsmasq-dns-6c4b958bbc-vf7wn\" (UID: \"458ba694-df9e-475f-bfa7-8d9acd200e8c\") " pod="openstack/dnsmasq-dns-6c4b958bbc-vf7wn" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.258878 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hrj5d\" (UniqueName: \"kubernetes.io/projected/458ba694-df9e-475f-bfa7-8d9acd200e8c-kube-api-access-hrj5d\") pod \"dnsmasq-dns-6c4b958bbc-vf7wn\" (UID: \"458ba694-df9e-475f-bfa7-8d9acd200e8c\") " pod="openstack/dnsmasq-dns-6c4b958bbc-vf7wn" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.258908 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/458ba694-df9e-475f-bfa7-8d9acd200e8c-dns-svc\") pod \"dnsmasq-dns-6c4b958bbc-vf7wn\" (UID: \"458ba694-df9e-475f-bfa7-8d9acd200e8c\") " pod="openstack/dnsmasq-dns-6c4b958bbc-vf7wn" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.258925 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/458ba694-df9e-475f-bfa7-8d9acd200e8c-ovsdbserver-nb\") pod \"dnsmasq-dns-6c4b958bbc-vf7wn\" (UID: \"458ba694-df9e-475f-bfa7-8d9acd200e8c\") " pod="openstack/dnsmasq-dns-6c4b958bbc-vf7wn" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.258969 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/458ba694-df9e-475f-bfa7-8d9acd200e8c-dns-swift-storage-0\") pod \"dnsmasq-dns-6c4b958bbc-vf7wn\" (UID: \"458ba694-df9e-475f-bfa7-8d9acd200e8c\") " pod="openstack/dnsmasq-dns-6c4b958bbc-vf7wn" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.258997 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/458ba694-df9e-475f-bfa7-8d9acd200e8c-ovsdbserver-sb\") pod \"dnsmasq-dns-6c4b958bbc-vf7wn\" (UID: \"458ba694-df9e-475f-bfa7-8d9acd200e8c\") " pod="openstack/dnsmasq-dns-6c4b958bbc-vf7wn" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.282269 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-5c8994b8f8-lr4cl"] Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.283893 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5c8994b8f8-lr4cl" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.287613 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.305153 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5c8994b8f8-lr4cl"] Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.324364 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-decision-engine-0" podStartSLOduration=6.32434644 podStartE2EDuration="6.32434644s" podCreationTimestamp="2025-12-05 12:46:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:46:37.149641151 +0000 UTC m=+1276.569707966" watchObservedRunningTime="2025-12-05 12:46:37.32434644 +0000 UTC m=+1276.744413255" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.361250 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vnrbb\" (UniqueName: \"kubernetes.io/projected/dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc-kube-api-access-vnrbb\") pod \"barbican-api-5c8994b8f8-lr4cl\" (UID: \"dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc\") " pod="openstack/barbican-api-5c8994b8f8-lr4cl" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.361312 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/458ba694-df9e-475f-bfa7-8d9acd200e8c-ovsdbserver-sb\") pod \"dnsmasq-dns-6c4b958bbc-vf7wn\" (UID: \"458ba694-df9e-475f-bfa7-8d9acd200e8c\") " pod="openstack/dnsmasq-dns-6c4b958bbc-vf7wn" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.361360 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc-logs\") pod \"barbican-api-5c8994b8f8-lr4cl\" (UID: \"dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc\") " pod="openstack/barbican-api-5c8994b8f8-lr4cl" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.361377 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc-combined-ca-bundle\") pod \"barbican-api-5c8994b8f8-lr4cl\" (UID: \"dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc\") " pod="openstack/barbican-api-5c8994b8f8-lr4cl" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.361443 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/458ba694-df9e-475f-bfa7-8d9acd200e8c-config\") pod \"dnsmasq-dns-6c4b958bbc-vf7wn\" (UID: \"458ba694-df9e-475f-bfa7-8d9acd200e8c\") " pod="openstack/dnsmasq-dns-6c4b958bbc-vf7wn" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.361460 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc-config-data\") pod \"barbican-api-5c8994b8f8-lr4cl\" (UID: \"dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc\") " pod="openstack/barbican-api-5c8994b8f8-lr4cl" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.361491 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" 
(UniqueName: \"kubernetes.io/secret/dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc-config-data-custom\") pod \"barbican-api-5c8994b8f8-lr4cl\" (UID: \"dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc\") " pod="openstack/barbican-api-5c8994b8f8-lr4cl" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.361524 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hrj5d\" (UniqueName: \"kubernetes.io/projected/458ba694-df9e-475f-bfa7-8d9acd200e8c-kube-api-access-hrj5d\") pod \"dnsmasq-dns-6c4b958bbc-vf7wn\" (UID: \"458ba694-df9e-475f-bfa7-8d9acd200e8c\") " pod="openstack/dnsmasq-dns-6c4b958bbc-vf7wn" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.361549 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/458ba694-df9e-475f-bfa7-8d9acd200e8c-dns-svc\") pod \"dnsmasq-dns-6c4b958bbc-vf7wn\" (UID: \"458ba694-df9e-475f-bfa7-8d9acd200e8c\") " pod="openstack/dnsmasq-dns-6c4b958bbc-vf7wn" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.361564 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/458ba694-df9e-475f-bfa7-8d9acd200e8c-ovsdbserver-nb\") pod \"dnsmasq-dns-6c4b958bbc-vf7wn\" (UID: \"458ba694-df9e-475f-bfa7-8d9acd200e8c\") " pod="openstack/dnsmasq-dns-6c4b958bbc-vf7wn" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.361610 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/458ba694-df9e-475f-bfa7-8d9acd200e8c-dns-swift-storage-0\") pod \"dnsmasq-dns-6c4b958bbc-vf7wn\" (UID: \"458ba694-df9e-475f-bfa7-8d9acd200e8c\") " pod="openstack/dnsmasq-dns-6c4b958bbc-vf7wn" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.362392 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/458ba694-df9e-475f-bfa7-8d9acd200e8c-dns-swift-storage-0\") pod \"dnsmasq-dns-6c4b958bbc-vf7wn\" (UID: \"458ba694-df9e-475f-bfa7-8d9acd200e8c\") " pod="openstack/dnsmasq-dns-6c4b958bbc-vf7wn" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.362892 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/458ba694-df9e-475f-bfa7-8d9acd200e8c-ovsdbserver-sb\") pod \"dnsmasq-dns-6c4b958bbc-vf7wn\" (UID: \"458ba694-df9e-475f-bfa7-8d9acd200e8c\") " pod="openstack/dnsmasq-dns-6c4b958bbc-vf7wn" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.363413 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/458ba694-df9e-475f-bfa7-8d9acd200e8c-config\") pod \"dnsmasq-dns-6c4b958bbc-vf7wn\" (UID: \"458ba694-df9e-475f-bfa7-8d9acd200e8c\") " pod="openstack/dnsmasq-dns-6c4b958bbc-vf7wn" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.364166 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/458ba694-df9e-475f-bfa7-8d9acd200e8c-dns-svc\") pod \"dnsmasq-dns-6c4b958bbc-vf7wn\" (UID: \"458ba694-df9e-475f-bfa7-8d9acd200e8c\") " pod="openstack/dnsmasq-dns-6c4b958bbc-vf7wn" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.364180 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/458ba694-df9e-475f-bfa7-8d9acd200e8c-ovsdbserver-nb\") pod 
\"dnsmasq-dns-6c4b958bbc-vf7wn\" (UID: \"458ba694-df9e-475f-bfa7-8d9acd200e8c\") " pod="openstack/dnsmasq-dns-6c4b958bbc-vf7wn" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.382051 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hrj5d\" (UniqueName: \"kubernetes.io/projected/458ba694-df9e-475f-bfa7-8d9acd200e8c-kube-api-access-hrj5d\") pod \"dnsmasq-dns-6c4b958bbc-vf7wn\" (UID: \"458ba694-df9e-475f-bfa7-8d9acd200e8c\") " pod="openstack/dnsmasq-dns-6c4b958bbc-vf7wn" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.405013 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-5866d77f58-dcc8l" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.470475 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc-logs\") pod \"barbican-api-5c8994b8f8-lr4cl\" (UID: \"dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc\") " pod="openstack/barbican-api-5c8994b8f8-lr4cl" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.470518 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc-combined-ca-bundle\") pod \"barbican-api-5c8994b8f8-lr4cl\" (UID: \"dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc\") " pod="openstack/barbican-api-5c8994b8f8-lr4cl" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.470631 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc-config-data\") pod \"barbican-api-5c8994b8f8-lr4cl\" (UID: \"dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc\") " pod="openstack/barbican-api-5c8994b8f8-lr4cl" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.470684 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc-config-data-custom\") pod \"barbican-api-5c8994b8f8-lr4cl\" (UID: \"dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc\") " pod="openstack/barbican-api-5c8994b8f8-lr4cl" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.470874 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vnrbb\" (UniqueName: \"kubernetes.io/projected/dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc-kube-api-access-vnrbb\") pod \"barbican-api-5c8994b8f8-lr4cl\" (UID: \"dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc\") " pod="openstack/barbican-api-5c8994b8f8-lr4cl" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.471697 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc-logs\") pod \"barbican-api-5c8994b8f8-lr4cl\" (UID: \"dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc\") " pod="openstack/barbican-api-5c8994b8f8-lr4cl" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.476567 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc-combined-ca-bundle\") pod \"barbican-api-5c8994b8f8-lr4cl\" (UID: \"dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc\") " pod="openstack/barbican-api-5c8994b8f8-lr4cl" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.479681 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc-config-data-custom\") pod \"barbican-api-5c8994b8f8-lr4cl\" (UID: \"dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc\") " pod="openstack/barbican-api-5c8994b8f8-lr4cl" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.483313 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc-config-data\") pod \"barbican-api-5c8994b8f8-lr4cl\" (UID: \"dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc\") " pod="openstack/barbican-api-5c8994b8f8-lr4cl" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.494724 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vnrbb\" (UniqueName: \"kubernetes.io/projected/dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc-kube-api-access-vnrbb\") pod \"barbican-api-5c8994b8f8-lr4cl\" (UID: \"dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc\") " pod="openstack/barbican-api-5c8994b8f8-lr4cl" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.558920 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c4b958bbc-vf7wn" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.581687 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.731804 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5c8994b8f8-lr4cl" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.886751 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-24k4j" Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.977131 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-655f48fc8f-k55s9"] Dec 05 12:46:37 crc kubenswrapper[4784]: W1205 12:46:37.983432 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb7fd6fd1_2f61_44a4_b8b7_9f1c38768db2.slice/crio-659f98edc11cd430a73b021846809665c47ddf2baf390dbb2ed98c302d9125ac WatchSource:0}: Error finding container 659f98edc11cd430a73b021846809665c47ddf2baf390dbb2ed98c302d9125ac: Status 404 returned error can't find the container with id 659f98edc11cd430a73b021846809665c47ddf2baf390dbb2ed98c302d9125ac Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.992017 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e187592b-b331-4144-9a27-ba81e79121b6-etc-machine-id\") pod \"e187592b-b331-4144-9a27-ba81e79121b6\" (UID: \"e187592b-b331-4144-9a27-ba81e79121b6\") " Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.992092 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e187592b-b331-4144-9a27-ba81e79121b6-scripts\") pod \"e187592b-b331-4144-9a27-ba81e79121b6\" (UID: \"e187592b-b331-4144-9a27-ba81e79121b6\") " Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.992182 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e187592b-b331-4144-9a27-ba81e79121b6-db-sync-config-data\") pod \"e187592b-b331-4144-9a27-ba81e79121b6\" (UID: \"e187592b-b331-4144-9a27-ba81e79121b6\") " Dec 05 12:46:37 crc 
Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.992357 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e187592b-b331-4144-9a27-ba81e79121b6-config-data\") pod \"e187592b-b331-4144-9a27-ba81e79121b6\" (UID: \"e187592b-b331-4144-9a27-ba81e79121b6\") "
Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.992405 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-knrkq\" (UniqueName: \"kubernetes.io/projected/e187592b-b331-4144-9a27-ba81e79121b6-kube-api-access-knrkq\") pod \"e187592b-b331-4144-9a27-ba81e79121b6\" (UID: \"e187592b-b331-4144-9a27-ba81e79121b6\") "
Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.992952 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e187592b-b331-4144-9a27-ba81e79121b6-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "e187592b-b331-4144-9a27-ba81e79121b6" (UID: "e187592b-b331-4144-9a27-ba81e79121b6"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 12:46:37 crc kubenswrapper[4784]: I1205 12:46:37.999348 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e187592b-b331-4144-9a27-ba81e79121b6-scripts" (OuterVolumeSpecName: "scripts") pod "e187592b-b331-4144-9a27-ba81e79121b6" (UID: "e187592b-b331-4144-9a27-ba81e79121b6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.000741 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e187592b-b331-4144-9a27-ba81e79121b6-kube-api-access-knrkq" (OuterVolumeSpecName: "kube-api-access-knrkq") pod "e187592b-b331-4144-9a27-ba81e79121b6" (UID: "e187592b-b331-4144-9a27-ba81e79121b6"). InnerVolumeSpecName "kube-api-access-knrkq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.015392 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e187592b-b331-4144-9a27-ba81e79121b6-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "e187592b-b331-4144-9a27-ba81e79121b6" (UID: "e187592b-b331-4144-9a27-ba81e79121b6"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.056840 4784 generic.go:334] "Generic (PLEG): container finished" podID="7a27e6c2-282d-4569-bee1-8c27c888a7ad" containerID="9e0ce67af1461527dd0e21fbb01c83c0f949d4c2802082ec8978873772e737d4" exitCode=0
Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.056893 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b99d567b7-vvbts" event={"ID":"7a27e6c2-282d-4569-bee1-8c27c888a7ad","Type":"ContainerDied","Data":"9e0ce67af1461527dd0e21fbb01c83c0f949d4c2802082ec8978873772e737d4"}
Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.062158 4784 generic.go:334] "Generic (PLEG): container finished" podID="ee259e74-24f6-4a39-b3d9-3bd926ace782" containerID="fa336b4018742e1bd6bc52ac57d9ac584c50caecceeb36ac430b9f2ff9c7b0d6" exitCode=0
Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.062413 4784 generic.go:334] "Generic (PLEG): container finished" podID="ee259e74-24f6-4a39-b3d9-3bd926ace782" containerID="472ef199bddfcf498f98d8428f6cdd40f6115c50a8e1b3c362aff43783290366" exitCode=2
Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.062489 4784 generic.go:334] "Generic (PLEG): container finished" podID="ee259e74-24f6-4a39-b3d9-3bd926ace782" containerID="8abd08789218c1d119999de5e917fb05499dc4cd916135ba40d89c9a7ff777ab" exitCode=0
Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.062295 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ee259e74-24f6-4a39-b3d9-3bd926ace782","Type":"ContainerDied","Data":"fa336b4018742e1bd6bc52ac57d9ac584c50caecceeb36ac430b9f2ff9c7b0d6"}
Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.062677 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ee259e74-24f6-4a39-b3d9-3bd926ace782","Type":"ContainerDied","Data":"472ef199bddfcf498f98d8428f6cdd40f6115c50a8e1b3c362aff43783290366"}
Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.063079 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ee259e74-24f6-4a39-b3d9-3bd926ace782","Type":"ContainerDied","Data":"8abd08789218c1d119999de5e917fb05499dc4cd916135ba40d89c9a7ff777ab"}
Dec 05 12:46:38 crc kubenswrapper[4784]: W1205 12:46:38.063281 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd1f830fd_3c91_4985_ac6f_96314a74acc1.slice/crio-03e7160dd603295ef5ee7227120dc091d5e1dfa10122d78212d7c36a5e834996 WatchSource:0}: Error finding container 03e7160dd603295ef5ee7227120dc091d5e1dfa10122d78212d7c36a5e834996: Status 404 returned error can't find the container with id 03e7160dd603295ef5ee7227120dc091d5e1dfa10122d78212d7c36a5e834996
Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.065194 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e187592b-b331-4144-9a27-ba81e79121b6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e187592b-b331-4144-9a27-ba81e79121b6" (UID: "e187592b-b331-4144-9a27-ba81e79121b6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.065472 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-655f48fc8f-k55s9" event={"ID":"b7fd6fd1-2f61-44a4-b8b7-9f1c38768db2","Type":"ContainerStarted","Data":"659f98edc11cd430a73b021846809665c47ddf2baf390dbb2ed98c302d9125ac"}
Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.072599 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-24k4j"
Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.072831 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-24k4j" event={"ID":"e187592b-b331-4144-9a27-ba81e79121b6","Type":"ContainerDied","Data":"cc393da546dfd7c02cb81e0614ebb97dc17c17c306ce8edf8b6e9e5968b8ff0a"}
Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.072976 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cc393da546dfd7c02cb81e0614ebb97dc17c17c306ce8edf8b6e9e5968b8ff0a"
Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.085582 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e187592b-b331-4144-9a27-ba81e79121b6-config-data" (OuterVolumeSpecName: "config-data") pod "e187592b-b331-4144-9a27-ba81e79121b6" (UID: "e187592b-b331-4144-9a27-ba81e79121b6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.087560 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7b99d567b7-vvbts"
Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.094570 4784 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e187592b-b331-4144-9a27-ba81e79121b6-etc-machine-id\") on node \"crc\" DevicePath \"\""
Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.094599 4784 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e187592b-b331-4144-9a27-ba81e79121b6-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.094610 4784 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e187592b-b331-4144-9a27-ba81e79121b6-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.094620 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e187592b-b331-4144-9a27-ba81e79121b6-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.094628 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e187592b-b331-4144-9a27-ba81e79121b6-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.094636 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-knrkq\" (UniqueName: \"kubernetes.io/projected/e187592b-b331-4144-9a27-ba81e79121b6-kube-api-access-knrkq\") on node \"crc\" DevicePath \"\""
Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.112347 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-5866d77f58-dcc8l"]
Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.196542 4784
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7a27e6c2-282d-4569-bee1-8c27c888a7ad-dns-svc\") pod \"7a27e6c2-282d-4569-bee1-8c27c888a7ad\" (UID: \"7a27e6c2-282d-4569-bee1-8c27c888a7ad\") " Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.196654 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a27e6c2-282d-4569-bee1-8c27c888a7ad-config\") pod \"7a27e6c2-282d-4569-bee1-8c27c888a7ad\" (UID: \"7a27e6c2-282d-4569-bee1-8c27c888a7ad\") " Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.196705 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7a27e6c2-282d-4569-bee1-8c27c888a7ad-ovsdbserver-nb\") pod \"7a27e6c2-282d-4569-bee1-8c27c888a7ad\" (UID: \"7a27e6c2-282d-4569-bee1-8c27c888a7ad\") " Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.196724 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7a27e6c2-282d-4569-bee1-8c27c888a7ad-dns-swift-storage-0\") pod \"7a27e6c2-282d-4569-bee1-8c27c888a7ad\" (UID: \"7a27e6c2-282d-4569-bee1-8c27c888a7ad\") " Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.196773 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q47lp\" (UniqueName: \"kubernetes.io/projected/7a27e6c2-282d-4569-bee1-8c27c888a7ad-kube-api-access-q47lp\") pod \"7a27e6c2-282d-4569-bee1-8c27c888a7ad\" (UID: \"7a27e6c2-282d-4569-bee1-8c27c888a7ad\") " Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.196790 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7a27e6c2-282d-4569-bee1-8c27c888a7ad-ovsdbserver-sb\") pod \"7a27e6c2-282d-4569-bee1-8c27c888a7ad\" (UID: \"7a27e6c2-282d-4569-bee1-8c27c888a7ad\") " Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.212602 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a27e6c2-282d-4569-bee1-8c27c888a7ad-kube-api-access-q47lp" (OuterVolumeSpecName: "kube-api-access-q47lp") pod "7a27e6c2-282d-4569-bee1-8c27c888a7ad" (UID: "7a27e6c2-282d-4569-bee1-8c27c888a7ad"). InnerVolumeSpecName "kube-api-access-q47lp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.276730 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a27e6c2-282d-4569-bee1-8c27c888a7ad-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "7a27e6c2-282d-4569-bee1-8c27c888a7ad" (UID: "7a27e6c2-282d-4569-bee1-8c27c888a7ad"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.289725 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 12:46:38 crc kubenswrapper[4784]: E1205 12:46:38.290246 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a27e6c2-282d-4569-bee1-8c27c888a7ad" containerName="init" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.290267 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a27e6c2-282d-4569-bee1-8c27c888a7ad" containerName="init" Dec 05 12:46:38 crc kubenswrapper[4784]: E1205 12:46:38.290293 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a27e6c2-282d-4569-bee1-8c27c888a7ad" containerName="dnsmasq-dns" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.290302 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a27e6c2-282d-4569-bee1-8c27c888a7ad" containerName="dnsmasq-dns" Dec 05 12:46:38 crc kubenswrapper[4784]: E1205 12:46:38.290321 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e187592b-b331-4144-9a27-ba81e79121b6" containerName="cinder-db-sync" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.290329 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="e187592b-b331-4144-9a27-ba81e79121b6" containerName="cinder-db-sync" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.290501 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a27e6c2-282d-4569-bee1-8c27c888a7ad" containerName="dnsmasq-dns" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.290535 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="e187592b-b331-4144-9a27-ba81e79121b6" containerName="cinder-db-sync" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.291561 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.301571 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.303631 4784 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7a27e6c2-282d-4569-bee1-8c27c888a7ad-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.303660 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q47lp\" (UniqueName: \"kubernetes.io/projected/7a27e6c2-282d-4569-bee1-8c27c888a7ad-kube-api-access-q47lp\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.338254 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a27e6c2-282d-4569-bee1-8c27c888a7ad-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "7a27e6c2-282d-4569-bee1-8c27c888a7ad" (UID: "7a27e6c2-282d-4569-bee1-8c27c888a7ad"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.338367 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c4b958bbc-vf7wn"] Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.343144 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a27e6c2-282d-4569-bee1-8c27c888a7ad-config" (OuterVolumeSpecName: "config") pod "7a27e6c2-282d-4569-bee1-8c27c888a7ad" (UID: "7a27e6c2-282d-4569-bee1-8c27c888a7ad"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.343782 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a27e6c2-282d-4569-bee1-8c27c888a7ad-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "7a27e6c2-282d-4569-bee1-8c27c888a7ad" (UID: "7a27e6c2-282d-4569-bee1-8c27c888a7ad"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.353547 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a27e6c2-282d-4569-bee1-8c27c888a7ad-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7a27e6c2-282d-4569-bee1-8c27c888a7ad" (UID: "7a27e6c2-282d-4569-bee1-8c27c888a7ad"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.366487 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.381248 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-c688f947-l56ns"] Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.384102 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-c688f947-l56ns" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.390708 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-c688f947-l56ns"] Dec 05 12:46:38 crc kubenswrapper[4784]: W1205 12:46:38.391470 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddddeb6f6_dea0_4ddc_8946_ae1e3fe478fc.slice/crio-291d54cfe16dfb9253d9340d4b313c66ead36f1719dc0bde37050362ae2bef44 WatchSource:0}: Error finding container 291d54cfe16dfb9253d9340d4b313c66ead36f1719dc0bde37050362ae2bef44: Status 404 returned error can't find the container with id 291d54cfe16dfb9253d9340d4b313c66ead36f1719dc0bde37050362ae2bef44 Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.404690 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e-config-data\") pod \"cinder-scheduler-0\" (UID: \"f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e\") " pod="openstack/cinder-scheduler-0" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.404781 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e-scripts\") pod \"cinder-scheduler-0\" (UID: \"f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e\") " pod="openstack/cinder-scheduler-0" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.404823 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e\") " pod="openstack/cinder-scheduler-0" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.404846 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e\") " pod="openstack/cinder-scheduler-0" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.404868 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-krt8r\" (UniqueName: \"kubernetes.io/projected/f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e-kube-api-access-krt8r\") pod \"cinder-scheduler-0\" (UID: \"f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e\") " pod="openstack/cinder-scheduler-0" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.404910 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e\") " pod="openstack/cinder-scheduler-0" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.404953 4784 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7a27e6c2-282d-4569-bee1-8c27c888a7ad-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.404964 4784 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/7a27e6c2-282d-4569-bee1-8c27c888a7ad-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.404972 4784 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7a27e6c2-282d-4569-bee1-8c27c888a7ad-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.404982 4784 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7a27e6c2-282d-4569-bee1-8c27c888a7ad-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.422796 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5c8994b8f8-lr4cl"] Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.450707 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c4b958bbc-vf7wn"] Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.512289 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4679e9e-a588-435e-96b5-7c2d31a6cc03-config\") pod \"dnsmasq-dns-c688f947-l56ns\" (UID: \"d4679e9e-a588-435e-96b5-7c2d31a6cc03\") " pod="openstack/dnsmasq-dns-c688f947-l56ns" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.512341 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e\") " pod="openstack/cinder-scheduler-0" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.512462 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e-config-data\") pod \"cinder-scheduler-0\" (UID: \"f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e\") " pod="openstack/cinder-scheduler-0" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.512724 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nbmpq\" (UniqueName: \"kubernetes.io/projected/d4679e9e-a588-435e-96b5-7c2d31a6cc03-kube-api-access-nbmpq\") pod \"dnsmasq-dns-c688f947-l56ns\" (UID: \"d4679e9e-a588-435e-96b5-7c2d31a6cc03\") " pod="openstack/dnsmasq-dns-c688f947-l56ns" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.512814 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d4679e9e-a588-435e-96b5-7c2d31a6cc03-ovsdbserver-nb\") pod \"dnsmasq-dns-c688f947-l56ns\" (UID: \"d4679e9e-a588-435e-96b5-7c2d31a6cc03\") " pod="openstack/dnsmasq-dns-c688f947-l56ns" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.513165 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d4679e9e-a588-435e-96b5-7c2d31a6cc03-ovsdbserver-sb\") pod \"dnsmasq-dns-c688f947-l56ns\" (UID: \"d4679e9e-a588-435e-96b5-7c2d31a6cc03\") " pod="openstack/dnsmasq-dns-c688f947-l56ns" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.513282 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e-scripts\") pod \"cinder-scheduler-0\" (UID: \"f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e\") " pod="openstack/cinder-scheduler-0" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.513782 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d4679e9e-a588-435e-96b5-7c2d31a6cc03-dns-swift-storage-0\") pod \"dnsmasq-dns-c688f947-l56ns\" (UID: \"d4679e9e-a588-435e-96b5-7c2d31a6cc03\") " pod="openstack/dnsmasq-dns-c688f947-l56ns" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.513833 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e\") " pod="openstack/cinder-scheduler-0" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.513899 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d4679e9e-a588-435e-96b5-7c2d31a6cc03-dns-svc\") pod \"dnsmasq-dns-c688f947-l56ns\" (UID: \"d4679e9e-a588-435e-96b5-7c2d31a6cc03\") " pod="openstack/dnsmasq-dns-c688f947-l56ns" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.513924 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e\") " pod="openstack/cinder-scheduler-0" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.514342 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-krt8r\" (UniqueName: \"kubernetes.io/projected/f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e-kube-api-access-krt8r\") pod \"cinder-scheduler-0\" (UID: \"f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e\") " pod="openstack/cinder-scheduler-0" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.515802 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e\") " pod="openstack/cinder-scheduler-0" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.517111 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e\") " pod="openstack/cinder-scheduler-0" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.517454 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e-scripts\") pod \"cinder-scheduler-0\" (UID: \"f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e\") " pod="openstack/cinder-scheduler-0" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.517693 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e\") " pod="openstack/cinder-scheduler-0" Dec 
05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.518334 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e-config-data\") pod \"cinder-scheduler-0\" (UID: \"f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e\") " pod="openstack/cinder-scheduler-0" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.526702 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.528429 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.531346 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-krt8r\" (UniqueName: \"kubernetes.io/projected/f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e-kube-api-access-krt8r\") pod \"cinder-scheduler-0\" (UID: \"f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e\") " pod="openstack/cinder-scheduler-0" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.532004 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.548863 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.615809 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d5f4a13-fb6a-4e24-940d-52e60d0c73f8-config-data\") pod \"cinder-api-0\" (UID: \"7d5f4a13-fb6a-4e24-940d-52e60d0c73f8\") " pod="openstack/cinder-api-0" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.615893 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4679e9e-a588-435e-96b5-7c2d31a6cc03-config\") pod \"dnsmasq-dns-c688f947-l56ns\" (UID: \"d4679e9e-a588-435e-96b5-7c2d31a6cc03\") " pod="openstack/dnsmasq-dns-c688f947-l56ns" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.615920 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d5f4a13-fb6a-4e24-940d-52e60d0c73f8-scripts\") pod \"cinder-api-0\" (UID: \"7d5f4a13-fb6a-4e24-940d-52e60d0c73f8\") " pod="openstack/cinder-api-0" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.615961 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nbmpq\" (UniqueName: \"kubernetes.io/projected/d4679e9e-a588-435e-96b5-7c2d31a6cc03-kube-api-access-nbmpq\") pod \"dnsmasq-dns-c688f947-l56ns\" (UID: \"d4679e9e-a588-435e-96b5-7c2d31a6cc03\") " pod="openstack/dnsmasq-dns-c688f947-l56ns" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.615979 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d5f4a13-fb6a-4e24-940d-52e60d0c73f8-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"7d5f4a13-fb6a-4e24-940d-52e60d0c73f8\") " pod="openstack/cinder-api-0" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.616003 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d4679e9e-a588-435e-96b5-7c2d31a6cc03-ovsdbserver-nb\") pod \"dnsmasq-dns-c688f947-l56ns\" (UID: 
\"d4679e9e-a588-435e-96b5-7c2d31a6cc03\") " pod="openstack/dnsmasq-dns-c688f947-l56ns" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.616019 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7d5f4a13-fb6a-4e24-940d-52e60d0c73f8-logs\") pod \"cinder-api-0\" (UID: \"7d5f4a13-fb6a-4e24-940d-52e60d0c73f8\") " pod="openstack/cinder-api-0" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.616047 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7d5f4a13-fb6a-4e24-940d-52e60d0c73f8-etc-machine-id\") pod \"cinder-api-0\" (UID: \"7d5f4a13-fb6a-4e24-940d-52e60d0c73f8\") " pod="openstack/cinder-api-0" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.616068 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9g2p\" (UniqueName: \"kubernetes.io/projected/7d5f4a13-fb6a-4e24-940d-52e60d0c73f8-kube-api-access-x9g2p\") pod \"cinder-api-0\" (UID: \"7d5f4a13-fb6a-4e24-940d-52e60d0c73f8\") " pod="openstack/cinder-api-0" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.616089 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7d5f4a13-fb6a-4e24-940d-52e60d0c73f8-config-data-custom\") pod \"cinder-api-0\" (UID: \"7d5f4a13-fb6a-4e24-940d-52e60d0c73f8\") " pod="openstack/cinder-api-0" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.616106 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d4679e9e-a588-435e-96b5-7c2d31a6cc03-ovsdbserver-sb\") pod \"dnsmasq-dns-c688f947-l56ns\" (UID: \"d4679e9e-a588-435e-96b5-7c2d31a6cc03\") " pod="openstack/dnsmasq-dns-c688f947-l56ns" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.616142 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d4679e9e-a588-435e-96b5-7c2d31a6cc03-dns-swift-storage-0\") pod \"dnsmasq-dns-c688f947-l56ns\" (UID: \"d4679e9e-a588-435e-96b5-7c2d31a6cc03\") " pod="openstack/dnsmasq-dns-c688f947-l56ns" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.616171 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d4679e9e-a588-435e-96b5-7c2d31a6cc03-dns-svc\") pod \"dnsmasq-dns-c688f947-l56ns\" (UID: \"d4679e9e-a588-435e-96b5-7c2d31a6cc03\") " pod="openstack/dnsmasq-dns-c688f947-l56ns" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.616927 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d4679e9e-a588-435e-96b5-7c2d31a6cc03-dns-svc\") pod \"dnsmasq-dns-c688f947-l56ns\" (UID: \"d4679e9e-a588-435e-96b5-7c2d31a6cc03\") " pod="openstack/dnsmasq-dns-c688f947-l56ns" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.617517 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4679e9e-a588-435e-96b5-7c2d31a6cc03-config\") pod \"dnsmasq-dns-c688f947-l56ns\" (UID: \"d4679e9e-a588-435e-96b5-7c2d31a6cc03\") " pod="openstack/dnsmasq-dns-c688f947-l56ns" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.618381 4784 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d4679e9e-a588-435e-96b5-7c2d31a6cc03-ovsdbserver-nb\") pod \"dnsmasq-dns-c688f947-l56ns\" (UID: \"d4679e9e-a588-435e-96b5-7c2d31a6cc03\") " pod="openstack/dnsmasq-dns-c688f947-l56ns" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.618927 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d4679e9e-a588-435e-96b5-7c2d31a6cc03-ovsdbserver-sb\") pod \"dnsmasq-dns-c688f947-l56ns\" (UID: \"d4679e9e-a588-435e-96b5-7c2d31a6cc03\") " pod="openstack/dnsmasq-dns-c688f947-l56ns" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.619464 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d4679e9e-a588-435e-96b5-7c2d31a6cc03-dns-swift-storage-0\") pod \"dnsmasq-dns-c688f947-l56ns\" (UID: \"d4679e9e-a588-435e-96b5-7c2d31a6cc03\") " pod="openstack/dnsmasq-dns-c688f947-l56ns" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.640872 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.646906 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nbmpq\" (UniqueName: \"kubernetes.io/projected/d4679e9e-a588-435e-96b5-7c2d31a6cc03-kube-api-access-nbmpq\") pod \"dnsmasq-dns-c688f947-l56ns\" (UID: \"d4679e9e-a588-435e-96b5-7c2d31a6cc03\") " pod="openstack/dnsmasq-dns-c688f947-l56ns" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.736012 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d5f4a13-fb6a-4e24-940d-52e60d0c73f8-scripts\") pod \"cinder-api-0\" (UID: \"7d5f4a13-fb6a-4e24-940d-52e60d0c73f8\") " pod="openstack/cinder-api-0" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.736141 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d5f4a13-fb6a-4e24-940d-52e60d0c73f8-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"7d5f4a13-fb6a-4e24-940d-52e60d0c73f8\") " pod="openstack/cinder-api-0" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.736205 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7d5f4a13-fb6a-4e24-940d-52e60d0c73f8-logs\") pod \"cinder-api-0\" (UID: \"7d5f4a13-fb6a-4e24-940d-52e60d0c73f8\") " pod="openstack/cinder-api-0" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.736259 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7d5f4a13-fb6a-4e24-940d-52e60d0c73f8-etc-machine-id\") pod \"cinder-api-0\" (UID: \"7d5f4a13-fb6a-4e24-940d-52e60d0c73f8\") " pod="openstack/cinder-api-0" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.736293 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9g2p\" (UniqueName: \"kubernetes.io/projected/7d5f4a13-fb6a-4e24-940d-52e60d0c73f8-kube-api-access-x9g2p\") pod \"cinder-api-0\" (UID: \"7d5f4a13-fb6a-4e24-940d-52e60d0c73f8\") " pod="openstack/cinder-api-0" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.736334 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7d5f4a13-fb6a-4e24-940d-52e60d0c73f8-config-data-custom\") pod \"cinder-api-0\" (UID: \"7d5f4a13-fb6a-4e24-940d-52e60d0c73f8\") " pod="openstack/cinder-api-0" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.736488 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d5f4a13-fb6a-4e24-940d-52e60d0c73f8-config-data\") pod \"cinder-api-0\" (UID: \"7d5f4a13-fb6a-4e24-940d-52e60d0c73f8\") " pod="openstack/cinder-api-0" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.737321 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7d5f4a13-fb6a-4e24-940d-52e60d0c73f8-logs\") pod \"cinder-api-0\" (UID: \"7d5f4a13-fb6a-4e24-940d-52e60d0c73f8\") " pod="openstack/cinder-api-0" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.741025 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d5f4a13-fb6a-4e24-940d-52e60d0c73f8-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"7d5f4a13-fb6a-4e24-940d-52e60d0c73f8\") " pod="openstack/cinder-api-0" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.741571 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7d5f4a13-fb6a-4e24-940d-52e60d0c73f8-etc-machine-id\") pod \"cinder-api-0\" (UID: \"7d5f4a13-fb6a-4e24-940d-52e60d0c73f8\") " pod="openstack/cinder-api-0" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.742755 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-c688f947-l56ns" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.746808 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d5f4a13-fb6a-4e24-940d-52e60d0c73f8-scripts\") pod \"cinder-api-0\" (UID: \"7d5f4a13-fb6a-4e24-940d-52e60d0c73f8\") " pod="openstack/cinder-api-0" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.746921 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7d5f4a13-fb6a-4e24-940d-52e60d0c73f8-config-data-custom\") pod \"cinder-api-0\" (UID: \"7d5f4a13-fb6a-4e24-940d-52e60d0c73f8\") " pod="openstack/cinder-api-0" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.754482 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d5f4a13-fb6a-4e24-940d-52e60d0c73f8-config-data\") pod \"cinder-api-0\" (UID: \"7d5f4a13-fb6a-4e24-940d-52e60d0c73f8\") " pod="openstack/cinder-api-0" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.758088 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9g2p\" (UniqueName: \"kubernetes.io/projected/7d5f4a13-fb6a-4e24-940d-52e60d0c73f8-kube-api-access-x9g2p\") pod \"cinder-api-0\" (UID: \"7d5f4a13-fb6a-4e24-940d-52e60d0c73f8\") " pod="openstack/cinder-api-0" Dec 05 12:46:38 crc kubenswrapper[4784]: I1205 12:46:38.868645 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 05 12:46:39 crc kubenswrapper[4784]: I1205 12:46:39.086439 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5866d77f58-dcc8l" event={"ID":"d1f830fd-3c91-4985-ac6f-96314a74acc1","Type":"ContainerStarted","Data":"03e7160dd603295ef5ee7227120dc091d5e1dfa10122d78212d7c36a5e834996"} Dec 05 12:46:39 crc kubenswrapper[4784]: I1205 12:46:39.088282 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5c8994b8f8-lr4cl" event={"ID":"dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc","Type":"ContainerStarted","Data":"766f9f440ff6ea0804d67f4f60e4fc74d51342cea217159a1984116133a23438"} Dec 05 12:46:39 crc kubenswrapper[4784]: I1205 12:46:39.088335 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5c8994b8f8-lr4cl" event={"ID":"dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc","Type":"ContainerStarted","Data":"291d54cfe16dfb9253d9340d4b313c66ead36f1719dc0bde37050362ae2bef44"} Dec 05 12:46:39 crc kubenswrapper[4784]: I1205 12:46:39.089661 4784 generic.go:334] "Generic (PLEG): container finished" podID="458ba694-df9e-475f-bfa7-8d9acd200e8c" containerID="441e315db26c4e249fc66759fec038d170f42388269a79e2e5a3ecb60276bd5b" exitCode=0 Dec 05 12:46:39 crc kubenswrapper[4784]: I1205 12:46:39.089770 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c4b958bbc-vf7wn" event={"ID":"458ba694-df9e-475f-bfa7-8d9acd200e8c","Type":"ContainerDied","Data":"441e315db26c4e249fc66759fec038d170f42388269a79e2e5a3ecb60276bd5b"} Dec 05 12:46:39 crc kubenswrapper[4784]: I1205 12:46:39.089832 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c4b958bbc-vf7wn" event={"ID":"458ba694-df9e-475f-bfa7-8d9acd200e8c","Type":"ContainerStarted","Data":"e027a327095c02431ae4bbd70b80c9ea3c3b85b718cbe261b2c79d21b49bf851"} Dec 05 12:46:39 crc kubenswrapper[4784]: I1205 12:46:39.099439 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b99d567b7-vvbts" event={"ID":"7a27e6c2-282d-4569-bee1-8c27c888a7ad","Type":"ContainerDied","Data":"34e80123c5cdeec38476e563a023bbb785f2d8fe37659003ca87047e2bdf5c79"} Dec 05 12:46:39 crc kubenswrapper[4784]: I1205 12:46:39.099495 4784 scope.go:117] "RemoveContainer" containerID="9e0ce67af1461527dd0e21fbb01c83c0f949d4c2802082ec8978873772e737d4" Dec 05 12:46:39 crc kubenswrapper[4784]: I1205 12:46:39.100258 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7b99d567b7-vvbts" Dec 05 12:46:39 crc kubenswrapper[4784]: I1205 12:46:39.148253 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 12:46:39 crc kubenswrapper[4784]: I1205 12:46:39.156945 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7b99d567b7-vvbts"] Dec 05 12:46:39 crc kubenswrapper[4784]: I1205 12:46:39.165177 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7b99d567b7-vvbts"] Dec 05 12:46:39 crc kubenswrapper[4784]: I1205 12:46:39.248606 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-56d56698b8-d88q6" Dec 05 12:46:39 crc kubenswrapper[4784]: I1205 12:46:39.707557 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6c4b958bbc-vf7wn" Dec 05 12:46:39 crc kubenswrapper[4784]: I1205 12:46:39.860022 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/458ba694-df9e-475f-bfa7-8d9acd200e8c-ovsdbserver-nb\") pod \"458ba694-df9e-475f-bfa7-8d9acd200e8c\" (UID: \"458ba694-df9e-475f-bfa7-8d9acd200e8c\") " Dec 05 12:46:39 crc kubenswrapper[4784]: I1205 12:46:39.860098 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/458ba694-df9e-475f-bfa7-8d9acd200e8c-ovsdbserver-sb\") pod \"458ba694-df9e-475f-bfa7-8d9acd200e8c\" (UID: \"458ba694-df9e-475f-bfa7-8d9acd200e8c\") " Dec 05 12:46:39 crc kubenswrapper[4784]: I1205 12:46:39.860137 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/458ba694-df9e-475f-bfa7-8d9acd200e8c-dns-swift-storage-0\") pod \"458ba694-df9e-475f-bfa7-8d9acd200e8c\" (UID: \"458ba694-df9e-475f-bfa7-8d9acd200e8c\") " Dec 05 12:46:39 crc kubenswrapper[4784]: I1205 12:46:39.860166 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/458ba694-df9e-475f-bfa7-8d9acd200e8c-config\") pod \"458ba694-df9e-475f-bfa7-8d9acd200e8c\" (UID: \"458ba694-df9e-475f-bfa7-8d9acd200e8c\") " Dec 05 12:46:39 crc kubenswrapper[4784]: I1205 12:46:39.860223 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/458ba694-df9e-475f-bfa7-8d9acd200e8c-dns-svc\") pod \"458ba694-df9e-475f-bfa7-8d9acd200e8c\" (UID: \"458ba694-df9e-475f-bfa7-8d9acd200e8c\") " Dec 05 12:46:39 crc kubenswrapper[4784]: I1205 12:46:39.860271 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hrj5d\" (UniqueName: \"kubernetes.io/projected/458ba694-df9e-475f-bfa7-8d9acd200e8c-kube-api-access-hrj5d\") pod \"458ba694-df9e-475f-bfa7-8d9acd200e8c\" (UID: \"458ba694-df9e-475f-bfa7-8d9acd200e8c\") " Dec 05 12:46:39 crc kubenswrapper[4784]: I1205 12:46:39.868678 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/458ba694-df9e-475f-bfa7-8d9acd200e8c-kube-api-access-hrj5d" (OuterVolumeSpecName: "kube-api-access-hrj5d") pod "458ba694-df9e-475f-bfa7-8d9acd200e8c" (UID: "458ba694-df9e-475f-bfa7-8d9acd200e8c"). InnerVolumeSpecName "kube-api-access-hrj5d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:46:39 crc kubenswrapper[4784]: I1205 12:46:39.885407 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/458ba694-df9e-475f-bfa7-8d9acd200e8c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "458ba694-df9e-475f-bfa7-8d9acd200e8c" (UID: "458ba694-df9e-475f-bfa7-8d9acd200e8c"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:46:39 crc kubenswrapper[4784]: I1205 12:46:39.890557 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/458ba694-df9e-475f-bfa7-8d9acd200e8c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "458ba694-df9e-475f-bfa7-8d9acd200e8c" (UID: "458ba694-df9e-475f-bfa7-8d9acd200e8c"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:46:39 crc kubenswrapper[4784]: I1205 12:46:39.894945 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/458ba694-df9e-475f-bfa7-8d9acd200e8c-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "458ba694-df9e-475f-bfa7-8d9acd200e8c" (UID: "458ba694-df9e-475f-bfa7-8d9acd200e8c"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:46:39 crc kubenswrapper[4784]: I1205 12:46:39.895610 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/458ba694-df9e-475f-bfa7-8d9acd200e8c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "458ba694-df9e-475f-bfa7-8d9acd200e8c" (UID: "458ba694-df9e-475f-bfa7-8d9acd200e8c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:46:39 crc kubenswrapper[4784]: I1205 12:46:39.900816 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/458ba694-df9e-475f-bfa7-8d9acd200e8c-config" (OuterVolumeSpecName: "config") pod "458ba694-df9e-475f-bfa7-8d9acd200e8c" (UID: "458ba694-df9e-475f-bfa7-8d9acd200e8c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:46:39 crc kubenswrapper[4784]: I1205 12:46:39.963079 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hrj5d\" (UniqueName: \"kubernetes.io/projected/458ba694-df9e-475f-bfa7-8d9acd200e8c-kube-api-access-hrj5d\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:39 crc kubenswrapper[4784]: I1205 12:46:39.963111 4784 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/458ba694-df9e-475f-bfa7-8d9acd200e8c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:39 crc kubenswrapper[4784]: I1205 12:46:39.963120 4784 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/458ba694-df9e-475f-bfa7-8d9acd200e8c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:39 crc kubenswrapper[4784]: I1205 12:46:39.963127 4784 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/458ba694-df9e-475f-bfa7-8d9acd200e8c-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:39 crc kubenswrapper[4784]: I1205 12:46:39.963136 4784 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/458ba694-df9e-475f-bfa7-8d9acd200e8c-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:39 crc kubenswrapper[4784]: I1205 12:46:39.963148 4784 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/458ba694-df9e-475f-bfa7-8d9acd200e8c-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:40 crc kubenswrapper[4784]: I1205 12:46:40.110550 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6c4b958bbc-vf7wn" Dec 05 12:46:40 crc kubenswrapper[4784]: I1205 12:46:40.110547 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c4b958bbc-vf7wn" event={"ID":"458ba694-df9e-475f-bfa7-8d9acd200e8c","Type":"ContainerDied","Data":"e027a327095c02431ae4bbd70b80c9ea3c3b85b718cbe261b2c79d21b49bf851"} Dec 05 12:46:40 crc kubenswrapper[4784]: I1205 12:46:40.111753 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e","Type":"ContainerStarted","Data":"dd858e92d15fd7b17bad3eeb55751db80cef85cff247ed9f8a08b5da4c6cf045"} Dec 05 12:46:40 crc kubenswrapper[4784]: I1205 12:46:40.174252 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-api-0" Dec 05 12:46:40 crc kubenswrapper[4784]: I1205 12:46:40.189724 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-api-0" Dec 05 12:46:40 crc kubenswrapper[4784]: I1205 12:46:40.195936 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c4b958bbc-vf7wn"] Dec 05 12:46:40 crc kubenswrapper[4784]: I1205 12:46:40.234920 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6c4b958bbc-vf7wn"] Dec 05 12:46:40 crc kubenswrapper[4784]: I1205 12:46:40.247784 4784 scope.go:117] "RemoveContainer" containerID="ade29fcc0c12ff3fa32471b57034251d7f1dc9018d5f17034d1180f7d2289e2b" Dec 05 12:46:40 crc kubenswrapper[4784]: I1205 12:46:40.466927 4784 scope.go:117] "RemoveContainer" containerID="441e315db26c4e249fc66759fec038d170f42388269a79e2e5a3ecb60276bd5b" Dec 05 12:46:40 crc kubenswrapper[4784]: I1205 12:46:40.988440 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-c688f947-l56ns"] Dec 05 12:46:41 crc kubenswrapper[4784]: I1205 12:46:41.035387 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="458ba694-df9e-475f-bfa7-8d9acd200e8c" path="/var/lib/kubelet/pods/458ba694-df9e-475f-bfa7-8d9acd200e8c/volumes" Dec 05 12:46:41 crc kubenswrapper[4784]: I1205 12:46:41.036405 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a27e6c2-282d-4569-bee1-8c27c888a7ad" path="/var/lib/kubelet/pods/7a27e6c2-282d-4569-bee1-8c27c888a7ad/volumes" Dec 05 12:46:41 crc kubenswrapper[4784]: I1205 12:46:41.038791 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 12:46:41 crc kubenswrapper[4784]: W1205 12:46:41.066097 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7d5f4a13_fb6a_4e24_940d_52e60d0c73f8.slice/crio-a30215a0ce1e632583cc93fb7ab30fa2263d5b8e3d872cc8955344b901327484 WatchSource:0}: Error finding container a30215a0ce1e632583cc93fb7ab30fa2263d5b8e3d872cc8955344b901327484: Status 404 returned error can't find the container with id a30215a0ce1e632583cc93fb7ab30fa2263d5b8e3d872cc8955344b901327484 Dec 05 12:46:41 crc kubenswrapper[4784]: I1205 12:46:41.178527 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5c8994b8f8-lr4cl" event={"ID":"dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc","Type":"ContainerStarted","Data":"2ca89ad316719941d6e5e0487dc6ed12df2ec0958fec84a3e10e6e022e53d93e"} Dec 05 12:46:41 crc kubenswrapper[4784]: I1205 12:46:41.179231 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5c8994b8f8-lr4cl" 
Dec 05 12:46:41 crc kubenswrapper[4784]: I1205 12:46:41.179280 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5c8994b8f8-lr4cl"
Dec 05 12:46:41 crc kubenswrapper[4784]: I1205 12:46:41.200891 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c688f947-l56ns" event={"ID":"d4679e9e-a588-435e-96b5-7c2d31a6cc03","Type":"ContainerStarted","Data":"d1404c95c6677c5892a2801aa6b67f0d1f17fed41fc5e9c9cfaab6535ccac437"}
Dec 05 12:46:41 crc kubenswrapper[4784]: I1205 12:46:41.214332 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-5c8994b8f8-lr4cl" podStartSLOduration=4.214310252 podStartE2EDuration="4.214310252s" podCreationTimestamp="2025-12-05 12:46:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:46:41.198315753 +0000 UTC m=+1280.618382568" watchObservedRunningTime="2025-12-05 12:46:41.214310252 +0000 UTC m=+1280.634377057"
Dec 05 12:46:41 crc kubenswrapper[4784]: I1205 12:46:41.233644 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"7d5f4a13-fb6a-4e24-940d-52e60d0c73f8","Type":"ContainerStarted","Data":"a30215a0ce1e632583cc93fb7ab30fa2263d5b8e3d872cc8955344b901327484"}
Dec 05 12:46:41 crc kubenswrapper[4784]: I1205 12:46:41.246864 4784 generic.go:334] "Generic (PLEG): container finished" podID="9ce21c3e-07a5-4404-827e-367acaba9d66" containerID="21cd6441ce99a6a9c28798369a643ebcea4cfc755457448e96eeb1474d186087" exitCode=1
Dec 05 12:46:41 crc kubenswrapper[4784]: I1205 12:46:41.246966 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"9ce21c3e-07a5-4404-827e-367acaba9d66","Type":"ContainerDied","Data":"21cd6441ce99a6a9c28798369a643ebcea4cfc755457448e96eeb1474d186087"}
Dec 05 12:46:41 crc kubenswrapper[4784]: I1205 12:46:41.247703 4784 scope.go:117] "RemoveContainer" containerID="21cd6441ce99a6a9c28798369a643ebcea4cfc755457448e96eeb1474d186087"
Dec 05 12:46:41 crc kubenswrapper[4784]: I1205 12:46:41.274369 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5866d77f58-dcc8l" event={"ID":"d1f830fd-3c91-4985-ac6f-96314a74acc1","Type":"ContainerStarted","Data":"13541cfb3fa018740392184bb506422f62843f83b0339f649f2d309b7b4d7906"}
Dec 05 12:46:41 crc kubenswrapper[4784]: I1205 12:46:41.279625 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-655f48fc8f-k55s9" event={"ID":"b7fd6fd1-2f61-44a4-b8b7-9f1c38768db2","Type":"ContainerStarted","Data":"5304cc3ee41f2a3692df43d2c033ff6e7ebd47e4120cecef04ddba12e1474a1e"}
Dec 05 12:46:41 crc kubenswrapper[4784]: I1205 12:46:41.295244 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-api-0"
Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.139397 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.228590 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ee259e74-24f6-4a39-b3d9-3bd926ace782-run-httpd\") pod \"ee259e74-24f6-4a39-b3d9-3bd926ace782\" (UID: \"ee259e74-24f6-4a39-b3d9-3bd926ace782\") "
Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.229025 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee259e74-24f6-4a39-b3d9-3bd926ace782-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "ee259e74-24f6-4a39-b3d9-3bd926ace782" (UID: "ee259e74-24f6-4a39-b3d9-3bd926ace782"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.230456 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ee259e74-24f6-4a39-b3d9-3bd926ace782-sg-core-conf-yaml\") pod \"ee259e74-24f6-4a39-b3d9-3bd926ace782\" (UID: \"ee259e74-24f6-4a39-b3d9-3bd926ace782\") "
Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.230566 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee259e74-24f6-4a39-b3d9-3bd926ace782-combined-ca-bundle\") pod \"ee259e74-24f6-4a39-b3d9-3bd926ace782\" (UID: \"ee259e74-24f6-4a39-b3d9-3bd926ace782\") "
Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.230614 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ee259e74-24f6-4a39-b3d9-3bd926ace782-log-httpd\") pod \"ee259e74-24f6-4a39-b3d9-3bd926ace782\" (UID: \"ee259e74-24f6-4a39-b3d9-3bd926ace782\") "
Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.230637 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee259e74-24f6-4a39-b3d9-3bd926ace782-scripts\") pod \"ee259e74-24f6-4a39-b3d9-3bd926ace782\" (UID: \"ee259e74-24f6-4a39-b3d9-3bd926ace782\") "
Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.230687 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m455j\" (UniqueName: \"kubernetes.io/projected/ee259e74-24f6-4a39-b3d9-3bd926ace782-kube-api-access-m455j\") pod \"ee259e74-24f6-4a39-b3d9-3bd926ace782\" (UID: \"ee259e74-24f6-4a39-b3d9-3bd926ace782\") "
Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.230714 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee259e74-24f6-4a39-b3d9-3bd926ace782-config-data\") pod \"ee259e74-24f6-4a39-b3d9-3bd926ace782\" (UID: \"ee259e74-24f6-4a39-b3d9-3bd926ace782\") "
Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.231392 4784 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ee259e74-24f6-4a39-b3d9-3bd926ace782-run-httpd\") on node \"crc\" DevicePath \"\""
Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.234459 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee259e74-24f6-4a39-b3d9-3bd926ace782-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "ee259e74-24f6-4a39-b3d9-3bd926ace782" (UID: "ee259e74-24f6-4a39-b3d9-3bd926ace782"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.248798 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee259e74-24f6-4a39-b3d9-3bd926ace782-scripts" (OuterVolumeSpecName: "scripts") pod "ee259e74-24f6-4a39-b3d9-3bd926ace782" (UID: "ee259e74-24f6-4a39-b3d9-3bd926ace782"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.258368 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee259e74-24f6-4a39-b3d9-3bd926ace782-kube-api-access-m455j" (OuterVolumeSpecName: "kube-api-access-m455j") pod "ee259e74-24f6-4a39-b3d9-3bd926ace782" (UID: "ee259e74-24f6-4a39-b3d9-3bd926ace782"). InnerVolumeSpecName "kube-api-access-m455j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.302711 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"]
Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.312893 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0"
Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.312938 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0"
Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.314521 4784 generic.go:334] "Generic (PLEG): container finished" podID="d4679e9e-a588-435e-96b5-7c2d31a6cc03" containerID="b11a4f358894c393793c5f2d413a015d3c656b65e8fdc92df4d370dc816a8f00" exitCode=0
Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.314574 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c688f947-l56ns" event={"ID":"d4679e9e-a588-435e-96b5-7c2d31a6cc03","Type":"ContainerDied","Data":"b11a4f358894c393793c5f2d413a015d3c656b65e8fdc92df4d370dc816a8f00"}
Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.320687 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"9ce21c3e-07a5-4404-827e-367acaba9d66","Type":"ContainerStarted","Data":"1877b82f9ddf0bdc85406d9005c4d441a13f9f4235a6c129d7cfe7724ccfd4e3"}
Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.336993 4784 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee259e74-24f6-4a39-b3d9-3bd926ace782-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.337028 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m455j\" (UniqueName: \"kubernetes.io/projected/ee259e74-24f6-4a39-b3d9-3bd926ace782-kube-api-access-m455j\") on node \"crc\" DevicePath \"\""
Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.337054 4784 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ee259e74-24f6-4a39-b3d9-3bd926ace782-log-httpd\") on node \"crc\" DevicePath \"\""
Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.342753 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5866d77f58-dcc8l" event={"ID":"d1f830fd-3c91-4985-ac6f-96314a74acc1","Type":"ContainerStarted","Data":"9bbf914b93b9576d51e074aa82a2861550252744a2e400b003a362eb0ca68708"}
Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.347307 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e","Type":"ContainerStarted","Data":"f6eec2743071ba7ac1b1d7ab9a27d25e0964ab1a8dfbb2d1f6f71d92a0402bbd"}
Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.360481 4784 generic.go:334] "Generic (PLEG): container finished" podID="ee259e74-24f6-4a39-b3d9-3bd926ace782" containerID="a82f9006a602c19583e2d55d81c80a786b050b17e4b7ac411347b1ecefe31e74" exitCode=0
Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.360559 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ee259e74-24f6-4a39-b3d9-3bd926ace782","Type":"ContainerDied","Data":"a82f9006a602c19583e2d55d81c80a786b050b17e4b7ac411347b1ecefe31e74"}
Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.360587 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ee259e74-24f6-4a39-b3d9-3bd926ace782","Type":"ContainerDied","Data":"1c115486369e3e3d38f90f80740a7547e2fa55768f3f637d76baa41fc815389c"}
Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.360607 4784 scope.go:117] "RemoveContainer" containerID="fa336b4018742e1bd6bc52ac57d9ac584c50caecceeb36ac430b9f2ff9c7b0d6"
Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.360741 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.394492 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-655f48fc8f-k55s9" event={"ID":"b7fd6fd1-2f61-44a4-b8b7-9f1c38768db2","Type":"ContainerStarted","Data":"c164e339d1d19b67d5ee5577868770b3c8898a46d08b783f74f4c8997bd7b182"}
Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.419047 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-5866d77f58-dcc8l" podStartSLOduration=3.967702703 podStartE2EDuration="6.419029016s" podCreationTimestamp="2025-12-05 12:46:36 +0000 UTC" firstStartedPulling="2025-12-05 12:46:38.064923647 +0000 UTC m=+1277.484990462" lastFinishedPulling="2025-12-05 12:46:40.51624994 +0000 UTC m=+1279.936316775" observedRunningTime="2025-12-05 12:46:42.376707766 +0000 UTC m=+1281.796774581" watchObservedRunningTime="2025-12-05 12:46:42.419029016 +0000 UTC m=+1281.839095831"
Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.428843 4784 scope.go:117] "RemoveContainer" containerID="472ef199bddfcf498f98d8428f6cdd40f6115c50a8e1b3c362aff43783290366"
Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.456229 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-655f48fc8f-k55s9" podStartSLOduration=3.988493811 podStartE2EDuration="6.456205205s" podCreationTimestamp="2025-12-05 12:46:36 +0000 UTC" firstStartedPulling="2025-12-05 12:46:37.999177097 +0000 UTC m=+1277.419243912" lastFinishedPulling="2025-12-05 12:46:40.466888491 +0000 UTC m=+1279.886955306" observedRunningTime="2025-12-05 12:46:42.434436446 +0000 UTC m=+1281.854503261" watchObservedRunningTime="2025-12-05 12:46:42.456205205 +0000 UTC m=+1281.876272020"
Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.477843 4784 scope.go:117] "RemoveContainer" containerID="a82f9006a602c19583e2d55d81c80a786b050b17e4b7ac411347b1ecefe31e74"
Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.532056 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume
"kubernetes.io/secret/ee259e74-24f6-4a39-b3d9-3bd926ace782-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "ee259e74-24f6-4a39-b3d9-3bd926ace782" (UID: "ee259e74-24f6-4a39-b3d9-3bd926ace782"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.544822 4784 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ee259e74-24f6-4a39-b3d9-3bd926ace782-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.572354 4784 scope.go:117] "RemoveContainer" containerID="8abd08789218c1d119999de5e917fb05499dc4cd916135ba40d89c9a7ff777ab" Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.599353 4784 scope.go:117] "RemoveContainer" containerID="fa336b4018742e1bd6bc52ac57d9ac584c50caecceeb36ac430b9f2ff9c7b0d6" Dec 05 12:46:42 crc kubenswrapper[4784]: E1205 12:46:42.599955 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa336b4018742e1bd6bc52ac57d9ac584c50caecceeb36ac430b9f2ff9c7b0d6\": container with ID starting with fa336b4018742e1bd6bc52ac57d9ac584c50caecceeb36ac430b9f2ff9c7b0d6 not found: ID does not exist" containerID="fa336b4018742e1bd6bc52ac57d9ac584c50caecceeb36ac430b9f2ff9c7b0d6" Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.600047 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa336b4018742e1bd6bc52ac57d9ac584c50caecceeb36ac430b9f2ff9c7b0d6"} err="failed to get container status \"fa336b4018742e1bd6bc52ac57d9ac584c50caecceeb36ac430b9f2ff9c7b0d6\": rpc error: code = NotFound desc = could not find container \"fa336b4018742e1bd6bc52ac57d9ac584c50caecceeb36ac430b9f2ff9c7b0d6\": container with ID starting with fa336b4018742e1bd6bc52ac57d9ac584c50caecceeb36ac430b9f2ff9c7b0d6 not found: ID does not exist" Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.600138 4784 scope.go:117] "RemoveContainer" containerID="472ef199bddfcf498f98d8428f6cdd40f6115c50a8e1b3c362aff43783290366" Dec 05 12:46:42 crc kubenswrapper[4784]: E1205 12:46:42.600886 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"472ef199bddfcf498f98d8428f6cdd40f6115c50a8e1b3c362aff43783290366\": container with ID starting with 472ef199bddfcf498f98d8428f6cdd40f6115c50a8e1b3c362aff43783290366 not found: ID does not exist" containerID="472ef199bddfcf498f98d8428f6cdd40f6115c50a8e1b3c362aff43783290366" Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.600931 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"472ef199bddfcf498f98d8428f6cdd40f6115c50a8e1b3c362aff43783290366"} err="failed to get container status \"472ef199bddfcf498f98d8428f6cdd40f6115c50a8e1b3c362aff43783290366\": rpc error: code = NotFound desc = could not find container \"472ef199bddfcf498f98d8428f6cdd40f6115c50a8e1b3c362aff43783290366\": container with ID starting with 472ef199bddfcf498f98d8428f6cdd40f6115c50a8e1b3c362aff43783290366 not found: ID does not exist" Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.600957 4784 scope.go:117] "RemoveContainer" containerID="a82f9006a602c19583e2d55d81c80a786b050b17e4b7ac411347b1ecefe31e74" Dec 05 12:46:42 crc kubenswrapper[4784]: E1205 12:46:42.602093 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code 
= NotFound desc = could not find container \"a82f9006a602c19583e2d55d81c80a786b050b17e4b7ac411347b1ecefe31e74\": container with ID starting with a82f9006a602c19583e2d55d81c80a786b050b17e4b7ac411347b1ecefe31e74 not found: ID does not exist" containerID="a82f9006a602c19583e2d55d81c80a786b050b17e4b7ac411347b1ecefe31e74" Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.602123 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a82f9006a602c19583e2d55d81c80a786b050b17e4b7ac411347b1ecefe31e74"} err="failed to get container status \"a82f9006a602c19583e2d55d81c80a786b050b17e4b7ac411347b1ecefe31e74\": rpc error: code = NotFound desc = could not find container \"a82f9006a602c19583e2d55d81c80a786b050b17e4b7ac411347b1ecefe31e74\": container with ID starting with a82f9006a602c19583e2d55d81c80a786b050b17e4b7ac411347b1ecefe31e74 not found: ID does not exist" Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.602139 4784 scope.go:117] "RemoveContainer" containerID="8abd08789218c1d119999de5e917fb05499dc4cd916135ba40d89c9a7ff777ab" Dec 05 12:46:42 crc kubenswrapper[4784]: E1205 12:46:42.604642 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8abd08789218c1d119999de5e917fb05499dc4cd916135ba40d89c9a7ff777ab\": container with ID starting with 8abd08789218c1d119999de5e917fb05499dc4cd916135ba40d89c9a7ff777ab not found: ID does not exist" containerID="8abd08789218c1d119999de5e917fb05499dc4cd916135ba40d89c9a7ff777ab" Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.604672 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8abd08789218c1d119999de5e917fb05499dc4cd916135ba40d89c9a7ff777ab"} err="failed to get container status \"8abd08789218c1d119999de5e917fb05499dc4cd916135ba40d89c9a7ff777ab\": rpc error: code = NotFound desc = could not find container \"8abd08789218c1d119999de5e917fb05499dc4cd916135ba40d89c9a7ff777ab\": container with ID starting with 8abd08789218c1d119999de5e917fb05499dc4cd916135ba40d89c9a7ff777ab not found: ID does not exist" Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.677259 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee259e74-24f6-4a39-b3d9-3bd926ace782-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ee259e74-24f6-4a39-b3d9-3bd926ace782" (UID: "ee259e74-24f6-4a39-b3d9-3bd926ace782"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.687349 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee259e74-24f6-4a39-b3d9-3bd926ace782-config-data" (OuterVolumeSpecName: "config-data") pod "ee259e74-24f6-4a39-b3d9-3bd926ace782" (UID: "ee259e74-24f6-4a39-b3d9-3bd926ace782"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.748506 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee259e74-24f6-4a39-b3d9-3bd926ace782-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.748537 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee259e74-24f6-4a39-b3d9-3bd926ace782-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:42 crc kubenswrapper[4784]: I1205 12:46:42.996221 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:46:43 crc kubenswrapper[4784]: I1205 12:46:43.024993 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:46:43 crc kubenswrapper[4784]: I1205 12:46:43.025268 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:46:43 crc kubenswrapper[4784]: E1205 12:46:43.025553 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee259e74-24f6-4a39-b3d9-3bd926ace782" containerName="sg-core" Dec 05 12:46:43 crc kubenswrapper[4784]: I1205 12:46:43.025566 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee259e74-24f6-4a39-b3d9-3bd926ace782" containerName="sg-core" Dec 05 12:46:43 crc kubenswrapper[4784]: E1205 12:46:43.025581 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee259e74-24f6-4a39-b3d9-3bd926ace782" containerName="ceilometer-central-agent" Dec 05 12:46:43 crc kubenswrapper[4784]: I1205 12:46:43.025586 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee259e74-24f6-4a39-b3d9-3bd926ace782" containerName="ceilometer-central-agent" Dec 05 12:46:43 crc kubenswrapper[4784]: E1205 12:46:43.025604 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee259e74-24f6-4a39-b3d9-3bd926ace782" containerName="ceilometer-notification-agent" Dec 05 12:46:43 crc kubenswrapper[4784]: I1205 12:46:43.025610 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee259e74-24f6-4a39-b3d9-3bd926ace782" containerName="ceilometer-notification-agent" Dec 05 12:46:43 crc kubenswrapper[4784]: E1205 12:46:43.025617 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="458ba694-df9e-475f-bfa7-8d9acd200e8c" containerName="init" Dec 05 12:46:43 crc kubenswrapper[4784]: I1205 12:46:43.025623 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="458ba694-df9e-475f-bfa7-8d9acd200e8c" containerName="init" Dec 05 12:46:43 crc kubenswrapper[4784]: E1205 12:46:43.025646 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee259e74-24f6-4a39-b3d9-3bd926ace782" containerName="proxy-httpd" Dec 05 12:46:43 crc kubenswrapper[4784]: I1205 12:46:43.025653 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee259e74-24f6-4a39-b3d9-3bd926ace782" containerName="proxy-httpd" Dec 05 12:46:43 crc kubenswrapper[4784]: I1205 12:46:43.025816 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee259e74-24f6-4a39-b3d9-3bd926ace782" containerName="proxy-httpd" Dec 05 12:46:43 crc kubenswrapper[4784]: I1205 12:46:43.025829 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="458ba694-df9e-475f-bfa7-8d9acd200e8c" containerName="init" Dec 05 12:46:43 crc kubenswrapper[4784]: I1205 12:46:43.025846 4784 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="ee259e74-24f6-4a39-b3d9-3bd926ace782" containerName="ceilometer-notification-agent" Dec 05 12:46:43 crc kubenswrapper[4784]: I1205 12:46:43.025860 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee259e74-24f6-4a39-b3d9-3bd926ace782" containerName="ceilometer-central-agent" Dec 05 12:46:43 crc kubenswrapper[4784]: I1205 12:46:43.025872 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee259e74-24f6-4a39-b3d9-3bd926ace782" containerName="sg-core" Dec 05 12:46:43 crc kubenswrapper[4784]: I1205 12:46:43.040729 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:46:43 crc kubenswrapper[4784]: I1205 12:46:43.042221 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:46:43 crc kubenswrapper[4784]: I1205 12:46:43.046549 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 12:46:43 crc kubenswrapper[4784]: I1205 12:46:43.048043 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 12:46:43 crc kubenswrapper[4784]: I1205 12:46:43.167178 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f62acf64-20e7-4e17-be2d-640a38de004f-config-data\") pod \"ceilometer-0\" (UID: \"f62acf64-20e7-4e17-be2d-640a38de004f\") " pod="openstack/ceilometer-0" Dec 05 12:46:43 crc kubenswrapper[4784]: I1205 12:46:43.167258 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fr4r6\" (UniqueName: \"kubernetes.io/projected/f62acf64-20e7-4e17-be2d-640a38de004f-kube-api-access-fr4r6\") pod \"ceilometer-0\" (UID: \"f62acf64-20e7-4e17-be2d-640a38de004f\") " pod="openstack/ceilometer-0" Dec 05 12:46:43 crc kubenswrapper[4784]: I1205 12:46:43.167346 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f62acf64-20e7-4e17-be2d-640a38de004f-scripts\") pod \"ceilometer-0\" (UID: \"f62acf64-20e7-4e17-be2d-640a38de004f\") " pod="openstack/ceilometer-0" Dec 05 12:46:43 crc kubenswrapper[4784]: I1205 12:46:43.167489 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f62acf64-20e7-4e17-be2d-640a38de004f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f62acf64-20e7-4e17-be2d-640a38de004f\") " pod="openstack/ceilometer-0" Dec 05 12:46:43 crc kubenswrapper[4784]: I1205 12:46:43.167548 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f62acf64-20e7-4e17-be2d-640a38de004f-log-httpd\") pod \"ceilometer-0\" (UID: \"f62acf64-20e7-4e17-be2d-640a38de004f\") " pod="openstack/ceilometer-0" Dec 05 12:46:43 crc kubenswrapper[4784]: I1205 12:46:43.167594 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f62acf64-20e7-4e17-be2d-640a38de004f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f62acf64-20e7-4e17-be2d-640a38de004f\") " pod="openstack/ceilometer-0" Dec 05 12:46:43 crc kubenswrapper[4784]: I1205 12:46:43.167899 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f62acf64-20e7-4e17-be2d-640a38de004f-run-httpd\") pod \"ceilometer-0\" (UID: \"f62acf64-20e7-4e17-be2d-640a38de004f\") " pod="openstack/ceilometer-0" Dec 05 12:46:43 crc kubenswrapper[4784]: I1205 12:46:43.269914 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f62acf64-20e7-4e17-be2d-640a38de004f-run-httpd\") pod \"ceilometer-0\" (UID: \"f62acf64-20e7-4e17-be2d-640a38de004f\") " pod="openstack/ceilometer-0" Dec 05 12:46:43 crc kubenswrapper[4784]: I1205 12:46:43.269992 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f62acf64-20e7-4e17-be2d-640a38de004f-config-data\") pod \"ceilometer-0\" (UID: \"f62acf64-20e7-4e17-be2d-640a38de004f\") " pod="openstack/ceilometer-0" Dec 05 12:46:43 crc kubenswrapper[4784]: I1205 12:46:43.270010 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fr4r6\" (UniqueName: \"kubernetes.io/projected/f62acf64-20e7-4e17-be2d-640a38de004f-kube-api-access-fr4r6\") pod \"ceilometer-0\" (UID: \"f62acf64-20e7-4e17-be2d-640a38de004f\") " pod="openstack/ceilometer-0" Dec 05 12:46:43 crc kubenswrapper[4784]: I1205 12:46:43.270033 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f62acf64-20e7-4e17-be2d-640a38de004f-scripts\") pod \"ceilometer-0\" (UID: \"f62acf64-20e7-4e17-be2d-640a38de004f\") " pod="openstack/ceilometer-0" Dec 05 12:46:43 crc kubenswrapper[4784]: I1205 12:46:43.270079 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f62acf64-20e7-4e17-be2d-640a38de004f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f62acf64-20e7-4e17-be2d-640a38de004f\") " pod="openstack/ceilometer-0" Dec 05 12:46:43 crc kubenswrapper[4784]: I1205 12:46:43.270098 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f62acf64-20e7-4e17-be2d-640a38de004f-log-httpd\") pod \"ceilometer-0\" (UID: \"f62acf64-20e7-4e17-be2d-640a38de004f\") " pod="openstack/ceilometer-0" Dec 05 12:46:43 crc kubenswrapper[4784]: I1205 12:46:43.270125 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f62acf64-20e7-4e17-be2d-640a38de004f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f62acf64-20e7-4e17-be2d-640a38de004f\") " pod="openstack/ceilometer-0" Dec 05 12:46:43 crc kubenswrapper[4784]: I1205 12:46:43.271198 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f62acf64-20e7-4e17-be2d-640a38de004f-run-httpd\") pod \"ceilometer-0\" (UID: \"f62acf64-20e7-4e17-be2d-640a38de004f\") " pod="openstack/ceilometer-0" Dec 05 12:46:43 crc kubenswrapper[4784]: I1205 12:46:43.271311 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f62acf64-20e7-4e17-be2d-640a38de004f-log-httpd\") pod \"ceilometer-0\" (UID: \"f62acf64-20e7-4e17-be2d-640a38de004f\") " pod="openstack/ceilometer-0" Dec 05 12:46:43 crc kubenswrapper[4784]: I1205 12:46:43.282855 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/f62acf64-20e7-4e17-be2d-640a38de004f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f62acf64-20e7-4e17-be2d-640a38de004f\") " pod="openstack/ceilometer-0" Dec 05 12:46:43 crc kubenswrapper[4784]: I1205 12:46:43.285236 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f62acf64-20e7-4e17-be2d-640a38de004f-scripts\") pod \"ceilometer-0\" (UID: \"f62acf64-20e7-4e17-be2d-640a38de004f\") " pod="openstack/ceilometer-0" Dec 05 12:46:43 crc kubenswrapper[4784]: I1205 12:46:43.286751 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f62acf64-20e7-4e17-be2d-640a38de004f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f62acf64-20e7-4e17-be2d-640a38de004f\") " pod="openstack/ceilometer-0" Dec 05 12:46:43 crc kubenswrapper[4784]: I1205 12:46:43.287824 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f62acf64-20e7-4e17-be2d-640a38de004f-config-data\") pod \"ceilometer-0\" (UID: \"f62acf64-20e7-4e17-be2d-640a38de004f\") " pod="openstack/ceilometer-0" Dec 05 12:46:43 crc kubenswrapper[4784]: I1205 12:46:43.292161 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fr4r6\" (UniqueName: \"kubernetes.io/projected/f62acf64-20e7-4e17-be2d-640a38de004f-kube-api-access-fr4r6\") pod \"ceilometer-0\" (UID: \"f62acf64-20e7-4e17-be2d-640a38de004f\") " pod="openstack/ceilometer-0" Dec 05 12:46:43 crc kubenswrapper[4784]: I1205 12:46:43.367753 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:46:43 crc kubenswrapper[4784]: I1205 12:46:43.415141 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e","Type":"ContainerStarted","Data":"cd1af3671b6a033ab9b96d423079150fc10f5b063cd3b5401e93f8b094f1e25b"} Dec 05 12:46:43 crc kubenswrapper[4784]: I1205 12:46:43.444754 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.499843205 podStartE2EDuration="5.444734485s" podCreationTimestamp="2025-12-05 12:46:38 +0000 UTC" firstStartedPulling="2025-12-05 12:46:39.595731221 +0000 UTC m=+1279.015798036" lastFinishedPulling="2025-12-05 12:46:40.540622501 +0000 UTC m=+1279.960689316" observedRunningTime="2025-12-05 12:46:43.438166991 +0000 UTC m=+1282.858233816" watchObservedRunningTime="2025-12-05 12:46:43.444734485 +0000 UTC m=+1282.864801300" Dec 05 12:46:43 crc kubenswrapper[4784]: I1205 12:46:43.454865 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c688f947-l56ns" event={"ID":"d4679e9e-a588-435e-96b5-7c2d31a6cc03","Type":"ContainerStarted","Data":"ae0b55c74257741bcb6f1e7986dff15a2d8bb36545c003befffbafa5cc29ebb6"} Dec 05 12:46:43 crc kubenswrapper[4784]: I1205 12:46:43.455014 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-c688f947-l56ns" Dec 05 12:46:43 crc kubenswrapper[4784]: I1205 12:46:43.458494 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"7d5f4a13-fb6a-4e24-940d-52e60d0c73f8","Type":"ContainerStarted","Data":"ac2d5f8bcd6121f5a547113ae9bd48ade41b32629f2cc418f634276f2739b929"} Dec 05 12:46:43 crc kubenswrapper[4784]: I1205 12:46:43.479894 4784 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openstack/dnsmasq-dns-c688f947-l56ns" podStartSLOduration=5.479871892 podStartE2EDuration="5.479871892s" podCreationTimestamp="2025-12-05 12:46:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:46:43.476555808 +0000 UTC m=+1282.896622623" watchObservedRunningTime="2025-12-05 12:46:43.479871892 +0000 UTC m=+1282.899938707" Dec 05 12:46:43 crc kubenswrapper[4784]: I1205 12:46:43.643287 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 05 12:46:43 crc kubenswrapper[4784]: I1205 12:46:43.928467 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:46:44 crc kubenswrapper[4784]: I1205 12:46:44.021322 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-7b64cff454-t47d4"] Dec 05 12:46:44 crc kubenswrapper[4784]: I1205 12:46:44.022867 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7b64cff454-t47d4" Dec 05 12:46:44 crc kubenswrapper[4784]: I1205 12:46:44.028317 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Dec 05 12:46:44 crc kubenswrapper[4784]: I1205 12:46:44.028571 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Dec 05 12:46:44 crc kubenswrapper[4784]: I1205 12:46:44.045482 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7b64cff454-t47d4"] Dec 05 12:46:44 crc kubenswrapper[4784]: I1205 12:46:44.122339 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/20cab8a1-167e-4a61-9de1-dbca99fc6978-config-data-custom\") pod \"barbican-api-7b64cff454-t47d4\" (UID: \"20cab8a1-167e-4a61-9de1-dbca99fc6978\") " pod="openstack/barbican-api-7b64cff454-t47d4" Dec 05 12:46:44 crc kubenswrapper[4784]: I1205 12:46:44.122396 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cvdck\" (UniqueName: \"kubernetes.io/projected/20cab8a1-167e-4a61-9de1-dbca99fc6978-kube-api-access-cvdck\") pod \"barbican-api-7b64cff454-t47d4\" (UID: \"20cab8a1-167e-4a61-9de1-dbca99fc6978\") " pod="openstack/barbican-api-7b64cff454-t47d4" Dec 05 12:46:44 crc kubenswrapper[4784]: I1205 12:46:44.122436 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20cab8a1-167e-4a61-9de1-dbca99fc6978-combined-ca-bundle\") pod \"barbican-api-7b64cff454-t47d4\" (UID: \"20cab8a1-167e-4a61-9de1-dbca99fc6978\") " pod="openstack/barbican-api-7b64cff454-t47d4" Dec 05 12:46:44 crc kubenswrapper[4784]: I1205 12:46:44.122514 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/20cab8a1-167e-4a61-9de1-dbca99fc6978-internal-tls-certs\") pod \"barbican-api-7b64cff454-t47d4\" (UID: \"20cab8a1-167e-4a61-9de1-dbca99fc6978\") " pod="openstack/barbican-api-7b64cff454-t47d4" Dec 05 12:46:44 crc kubenswrapper[4784]: I1205 12:46:44.122535 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/20cab8a1-167e-4a61-9de1-dbca99fc6978-logs\") pod \"barbican-api-7b64cff454-t47d4\" (UID: \"20cab8a1-167e-4a61-9de1-dbca99fc6978\") " pod="openstack/barbican-api-7b64cff454-t47d4" Dec 05 12:46:44 crc kubenswrapper[4784]: I1205 12:46:44.122558 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20cab8a1-167e-4a61-9de1-dbca99fc6978-config-data\") pod \"barbican-api-7b64cff454-t47d4\" (UID: \"20cab8a1-167e-4a61-9de1-dbca99fc6978\") " pod="openstack/barbican-api-7b64cff454-t47d4" Dec 05 12:46:44 crc kubenswrapper[4784]: I1205 12:46:44.122626 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/20cab8a1-167e-4a61-9de1-dbca99fc6978-public-tls-certs\") pod \"barbican-api-7b64cff454-t47d4\" (UID: \"20cab8a1-167e-4a61-9de1-dbca99fc6978\") " pod="openstack/barbican-api-7b64cff454-t47d4" Dec 05 12:46:44 crc kubenswrapper[4784]: I1205 12:46:44.227178 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/20cab8a1-167e-4a61-9de1-dbca99fc6978-config-data-custom\") pod \"barbican-api-7b64cff454-t47d4\" (UID: \"20cab8a1-167e-4a61-9de1-dbca99fc6978\") " pod="openstack/barbican-api-7b64cff454-t47d4" Dec 05 12:46:44 crc kubenswrapper[4784]: I1205 12:46:44.227542 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cvdck\" (UniqueName: \"kubernetes.io/projected/20cab8a1-167e-4a61-9de1-dbca99fc6978-kube-api-access-cvdck\") pod \"barbican-api-7b64cff454-t47d4\" (UID: \"20cab8a1-167e-4a61-9de1-dbca99fc6978\") " pod="openstack/barbican-api-7b64cff454-t47d4" Dec 05 12:46:44 crc kubenswrapper[4784]: I1205 12:46:44.227576 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20cab8a1-167e-4a61-9de1-dbca99fc6978-combined-ca-bundle\") pod \"barbican-api-7b64cff454-t47d4\" (UID: \"20cab8a1-167e-4a61-9de1-dbca99fc6978\") " pod="openstack/barbican-api-7b64cff454-t47d4" Dec 05 12:46:44 crc kubenswrapper[4784]: I1205 12:46:44.227613 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/20cab8a1-167e-4a61-9de1-dbca99fc6978-internal-tls-certs\") pod \"barbican-api-7b64cff454-t47d4\" (UID: \"20cab8a1-167e-4a61-9de1-dbca99fc6978\") " pod="openstack/barbican-api-7b64cff454-t47d4" Dec 05 12:46:44 crc kubenswrapper[4784]: I1205 12:46:44.227633 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/20cab8a1-167e-4a61-9de1-dbca99fc6978-logs\") pod \"barbican-api-7b64cff454-t47d4\" (UID: \"20cab8a1-167e-4a61-9de1-dbca99fc6978\") " pod="openstack/barbican-api-7b64cff454-t47d4" Dec 05 12:46:44 crc kubenswrapper[4784]: I1205 12:46:44.227654 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20cab8a1-167e-4a61-9de1-dbca99fc6978-config-data\") pod \"barbican-api-7b64cff454-t47d4\" (UID: \"20cab8a1-167e-4a61-9de1-dbca99fc6978\") " pod="openstack/barbican-api-7b64cff454-t47d4" Dec 05 12:46:44 crc kubenswrapper[4784]: I1205 12:46:44.227688 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/20cab8a1-167e-4a61-9de1-dbca99fc6978-public-tls-certs\") pod \"barbican-api-7b64cff454-t47d4\" (UID: \"20cab8a1-167e-4a61-9de1-dbca99fc6978\") " pod="openstack/barbican-api-7b64cff454-t47d4" Dec 05 12:46:44 crc kubenswrapper[4784]: I1205 12:46:44.231632 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/20cab8a1-167e-4a61-9de1-dbca99fc6978-logs\") pod \"barbican-api-7b64cff454-t47d4\" (UID: \"20cab8a1-167e-4a61-9de1-dbca99fc6978\") " pod="openstack/barbican-api-7b64cff454-t47d4" Dec 05 12:46:44 crc kubenswrapper[4784]: I1205 12:46:44.234929 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/20cab8a1-167e-4a61-9de1-dbca99fc6978-public-tls-certs\") pod \"barbican-api-7b64cff454-t47d4\" (UID: \"20cab8a1-167e-4a61-9de1-dbca99fc6978\") " pod="openstack/barbican-api-7b64cff454-t47d4" Dec 05 12:46:44 crc kubenswrapper[4784]: I1205 12:46:44.244749 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/20cab8a1-167e-4a61-9de1-dbca99fc6978-config-data-custom\") pod \"barbican-api-7b64cff454-t47d4\" (UID: \"20cab8a1-167e-4a61-9de1-dbca99fc6978\") " pod="openstack/barbican-api-7b64cff454-t47d4" Dec 05 12:46:44 crc kubenswrapper[4784]: I1205 12:46:44.256010 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cvdck\" (UniqueName: \"kubernetes.io/projected/20cab8a1-167e-4a61-9de1-dbca99fc6978-kube-api-access-cvdck\") pod \"barbican-api-7b64cff454-t47d4\" (UID: \"20cab8a1-167e-4a61-9de1-dbca99fc6978\") " pod="openstack/barbican-api-7b64cff454-t47d4" Dec 05 12:46:44 crc kubenswrapper[4784]: I1205 12:46:44.263048 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/20cab8a1-167e-4a61-9de1-dbca99fc6978-internal-tls-certs\") pod \"barbican-api-7b64cff454-t47d4\" (UID: \"20cab8a1-167e-4a61-9de1-dbca99fc6978\") " pod="openstack/barbican-api-7b64cff454-t47d4" Dec 05 12:46:44 crc kubenswrapper[4784]: I1205 12:46:44.263804 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20cab8a1-167e-4a61-9de1-dbca99fc6978-combined-ca-bundle\") pod \"barbican-api-7b64cff454-t47d4\" (UID: \"20cab8a1-167e-4a61-9de1-dbca99fc6978\") " pod="openstack/barbican-api-7b64cff454-t47d4" Dec 05 12:46:44 crc kubenswrapper[4784]: I1205 12:46:44.265527 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20cab8a1-167e-4a61-9de1-dbca99fc6978-config-data\") pod \"barbican-api-7b64cff454-t47d4\" (UID: \"20cab8a1-167e-4a61-9de1-dbca99fc6978\") " pod="openstack/barbican-api-7b64cff454-t47d4" Dec 05 12:46:44 crc kubenswrapper[4784]: I1205 12:46:44.358205 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-7b64cff454-t47d4" Dec 05 12:46:44 crc kubenswrapper[4784]: I1205 12:46:44.481463 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"7d5f4a13-fb6a-4e24-940d-52e60d0c73f8","Type":"ContainerStarted","Data":"825703a456ff502b748619568371f2453b98399b3f61f59d1e9e9ddd8074292e"} Dec 05 12:46:44 crc kubenswrapper[4784]: I1205 12:46:44.482130 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="7d5f4a13-fb6a-4e24-940d-52e60d0c73f8" containerName="cinder-api-log" containerID="cri-o://ac2d5f8bcd6121f5a547113ae9bd48ade41b32629f2cc418f634276f2739b929" gracePeriod=30 Dec 05 12:46:44 crc kubenswrapper[4784]: I1205 12:46:44.482285 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 05 12:46:44 crc kubenswrapper[4784]: I1205 12:46:44.482518 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="7d5f4a13-fb6a-4e24-940d-52e60d0c73f8" containerName="cinder-api" containerID="cri-o://825703a456ff502b748619568371f2453b98399b3f61f59d1e9e9ddd8074292e" gracePeriod=30 Dec 05 12:46:44 crc kubenswrapper[4784]: I1205 12:46:44.495877 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f62acf64-20e7-4e17-be2d-640a38de004f","Type":"ContainerStarted","Data":"8e3594415ca105984f24daadd0efe4b5ee77eef5b1f71799640235995207d0c6"} Dec 05 12:46:44 crc kubenswrapper[4784]: I1205 12:46:44.507954 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=6.507932246 podStartE2EDuration="6.507932246s" podCreationTimestamp="2025-12-05 12:46:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:46:44.500789642 +0000 UTC m=+1283.920856467" watchObservedRunningTime="2025-12-05 12:46:44.507932246 +0000 UTC m=+1283.927999061" Dec 05 12:46:44 crc kubenswrapper[4784]: I1205 12:46:44.924484 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7b64cff454-t47d4"] Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.034802 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee259e74-24f6-4a39-b3d9-3bd926ace782" path="/var/lib/kubelet/pods/ee259e74-24f6-4a39-b3d9-3bd926ace782/volumes" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.160810 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.250881 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x9g2p\" (UniqueName: \"kubernetes.io/projected/7d5f4a13-fb6a-4e24-940d-52e60d0c73f8-kube-api-access-x9g2p\") pod \"7d5f4a13-fb6a-4e24-940d-52e60d0c73f8\" (UID: \"7d5f4a13-fb6a-4e24-940d-52e60d0c73f8\") " Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.251008 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7d5f4a13-fb6a-4e24-940d-52e60d0c73f8-etc-machine-id\") pod \"7d5f4a13-fb6a-4e24-940d-52e60d0c73f8\" (UID: \"7d5f4a13-fb6a-4e24-940d-52e60d0c73f8\") " Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.251050 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7d5f4a13-fb6a-4e24-940d-52e60d0c73f8-config-data-custom\") pod \"7d5f4a13-fb6a-4e24-940d-52e60d0c73f8\" (UID: \"7d5f4a13-fb6a-4e24-940d-52e60d0c73f8\") " Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.251138 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7d5f4a13-fb6a-4e24-940d-52e60d0c73f8-logs\") pod \"7d5f4a13-fb6a-4e24-940d-52e60d0c73f8\" (UID: \"7d5f4a13-fb6a-4e24-940d-52e60d0c73f8\") " Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.251155 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d5f4a13-fb6a-4e24-940d-52e60d0c73f8-combined-ca-bundle\") pod \"7d5f4a13-fb6a-4e24-940d-52e60d0c73f8\" (UID: \"7d5f4a13-fb6a-4e24-940d-52e60d0c73f8\") " Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.251170 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d5f4a13-fb6a-4e24-940d-52e60d0c73f8-config-data\") pod \"7d5f4a13-fb6a-4e24-940d-52e60d0c73f8\" (UID: \"7d5f4a13-fb6a-4e24-940d-52e60d0c73f8\") " Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.251257 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d5f4a13-fb6a-4e24-940d-52e60d0c73f8-scripts\") pod \"7d5f4a13-fb6a-4e24-940d-52e60d0c73f8\" (UID: \"7d5f4a13-fb6a-4e24-940d-52e60d0c73f8\") " Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.254257 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7d5f4a13-fb6a-4e24-940d-52e60d0c73f8-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "7d5f4a13-fb6a-4e24-940d-52e60d0c73f8" (UID: "7d5f4a13-fb6a-4e24-940d-52e60d0c73f8"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.254593 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7d5f4a13-fb6a-4e24-940d-52e60d0c73f8-logs" (OuterVolumeSpecName: "logs") pod "7d5f4a13-fb6a-4e24-940d-52e60d0c73f8" (UID: "7d5f4a13-fb6a-4e24-940d-52e60d0c73f8"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.260316 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d5f4a13-fb6a-4e24-940d-52e60d0c73f8-scripts" (OuterVolumeSpecName: "scripts") pod "7d5f4a13-fb6a-4e24-940d-52e60d0c73f8" (UID: "7d5f4a13-fb6a-4e24-940d-52e60d0c73f8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.260365 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d5f4a13-fb6a-4e24-940d-52e60d0c73f8-kube-api-access-x9g2p" (OuterVolumeSpecName: "kube-api-access-x9g2p") pod "7d5f4a13-fb6a-4e24-940d-52e60d0c73f8" (UID: "7d5f4a13-fb6a-4e24-940d-52e60d0c73f8"). InnerVolumeSpecName "kube-api-access-x9g2p". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.269986 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d5f4a13-fb6a-4e24-940d-52e60d0c73f8-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "7d5f4a13-fb6a-4e24-940d-52e60d0c73f8" (UID: "7d5f4a13-fb6a-4e24-940d-52e60d0c73f8"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.298598 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d5f4a13-fb6a-4e24-940d-52e60d0c73f8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7d5f4a13-fb6a-4e24-940d-52e60d0c73f8" (UID: "7d5f4a13-fb6a-4e24-940d-52e60d0c73f8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.326338 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d5f4a13-fb6a-4e24-940d-52e60d0c73f8-config-data" (OuterVolumeSpecName: "config-data") pod "7d5f4a13-fb6a-4e24-940d-52e60d0c73f8" (UID: "7d5f4a13-fb6a-4e24-940d-52e60d0c73f8"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.354248 4784 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7d5f4a13-fb6a-4e24-940d-52e60d0c73f8-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.357450 4784 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7d5f4a13-fb6a-4e24-940d-52e60d0c73f8-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.357644 4784 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7d5f4a13-fb6a-4e24-940d-52e60d0c73f8-logs\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.357716 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d5f4a13-fb6a-4e24-940d-52e60d0c73f8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.357791 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d5f4a13-fb6a-4e24-940d-52e60d0c73f8-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.357857 4784 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d5f4a13-fb6a-4e24-940d-52e60d0c73f8-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.357924 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x9g2p\" (UniqueName: \"kubernetes.io/projected/7d5f4a13-fb6a-4e24-940d-52e60d0c73f8-kube-api-access-x9g2p\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.519105 4784 generic.go:334] "Generic (PLEG): container finished" podID="7d5f4a13-fb6a-4e24-940d-52e60d0c73f8" containerID="825703a456ff502b748619568371f2453b98399b3f61f59d1e9e9ddd8074292e" exitCode=0 Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.519628 4784 generic.go:334] "Generic (PLEG): container finished" podID="7d5f4a13-fb6a-4e24-940d-52e60d0c73f8" containerID="ac2d5f8bcd6121f5a547113ae9bd48ade41b32629f2cc418f634276f2739b929" exitCode=143 Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.519745 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"7d5f4a13-fb6a-4e24-940d-52e60d0c73f8","Type":"ContainerDied","Data":"825703a456ff502b748619568371f2453b98399b3f61f59d1e9e9ddd8074292e"} Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.519824 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"7d5f4a13-fb6a-4e24-940d-52e60d0c73f8","Type":"ContainerDied","Data":"ac2d5f8bcd6121f5a547113ae9bd48ade41b32629f2cc418f634276f2739b929"} Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.519902 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"7d5f4a13-fb6a-4e24-940d-52e60d0c73f8","Type":"ContainerDied","Data":"a30215a0ce1e632583cc93fb7ab30fa2263d5b8e3d872cc8955344b901327484"} Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.519983 4784 scope.go:117] "RemoveContainer" containerID="825703a456ff502b748619568371f2453b98399b3f61f59d1e9e9ddd8074292e" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.520156 
4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.528269 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f62acf64-20e7-4e17-be2d-640a38de004f","Type":"ContainerStarted","Data":"a9a505cdd064b44dd8f1e428c74b53b8b832e0796eb7a184328a97a2536ad7a6"} Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.528317 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f62acf64-20e7-4e17-be2d-640a38de004f","Type":"ContainerStarted","Data":"10e21568116bb7d039c08e10eb44ff3863b9da7e71581feaf91d9e54cbe0426d"} Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.532597 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7b64cff454-t47d4" event={"ID":"20cab8a1-167e-4a61-9de1-dbca99fc6978","Type":"ContainerStarted","Data":"aa0671d3182c24db614aebc6c508c15f28f05b5af1e78e9a7500e36f4c3890c2"} Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.532647 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7b64cff454-t47d4" event={"ID":"20cab8a1-167e-4a61-9de1-dbca99fc6978","Type":"ContainerStarted","Data":"2821984e7ce8d3871e2fe95f583b38431ed5a13508b5fcc27467c1899cc9cf7a"} Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.573210 4784 scope.go:117] "RemoveContainer" containerID="ac2d5f8bcd6121f5a547113ae9bd48ade41b32629f2cc418f634276f2739b929" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.577896 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.589960 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.602423 4784 scope.go:117] "RemoveContainer" containerID="825703a456ff502b748619568371f2453b98399b3f61f59d1e9e9ddd8074292e" Dec 05 12:46:45 crc kubenswrapper[4784]: E1205 12:46:45.608405 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"825703a456ff502b748619568371f2453b98399b3f61f59d1e9e9ddd8074292e\": container with ID starting with 825703a456ff502b748619568371f2453b98399b3f61f59d1e9e9ddd8074292e not found: ID does not exist" containerID="825703a456ff502b748619568371f2453b98399b3f61f59d1e9e9ddd8074292e" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.608459 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"825703a456ff502b748619568371f2453b98399b3f61f59d1e9e9ddd8074292e"} err="failed to get container status \"825703a456ff502b748619568371f2453b98399b3f61f59d1e9e9ddd8074292e\": rpc error: code = NotFound desc = could not find container \"825703a456ff502b748619568371f2453b98399b3f61f59d1e9e9ddd8074292e\": container with ID starting with 825703a456ff502b748619568371f2453b98399b3f61f59d1e9e9ddd8074292e not found: ID does not exist" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.608491 4784 scope.go:117] "RemoveContainer" containerID="ac2d5f8bcd6121f5a547113ae9bd48ade41b32629f2cc418f634276f2739b929" Dec 05 12:46:45 crc kubenswrapper[4784]: E1205 12:46:45.609704 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ac2d5f8bcd6121f5a547113ae9bd48ade41b32629f2cc418f634276f2739b929\": container with ID starting with 
ac2d5f8bcd6121f5a547113ae9bd48ade41b32629f2cc418f634276f2739b929 not found: ID does not exist" containerID="ac2d5f8bcd6121f5a547113ae9bd48ade41b32629f2cc418f634276f2739b929" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.609753 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac2d5f8bcd6121f5a547113ae9bd48ade41b32629f2cc418f634276f2739b929"} err="failed to get container status \"ac2d5f8bcd6121f5a547113ae9bd48ade41b32629f2cc418f634276f2739b929\": rpc error: code = NotFound desc = could not find container \"ac2d5f8bcd6121f5a547113ae9bd48ade41b32629f2cc418f634276f2739b929\": container with ID starting with ac2d5f8bcd6121f5a547113ae9bd48ade41b32629f2cc418f634276f2739b929 not found: ID does not exist" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.609794 4784 scope.go:117] "RemoveContainer" containerID="825703a456ff502b748619568371f2453b98399b3f61f59d1e9e9ddd8074292e" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.610283 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"825703a456ff502b748619568371f2453b98399b3f61f59d1e9e9ddd8074292e"} err="failed to get container status \"825703a456ff502b748619568371f2453b98399b3f61f59d1e9e9ddd8074292e\": rpc error: code = NotFound desc = could not find container \"825703a456ff502b748619568371f2453b98399b3f61f59d1e9e9ddd8074292e\": container with ID starting with 825703a456ff502b748619568371f2453b98399b3f61f59d1e9e9ddd8074292e not found: ID does not exist" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.610313 4784 scope.go:117] "RemoveContainer" containerID="ac2d5f8bcd6121f5a547113ae9bd48ade41b32629f2cc418f634276f2739b929" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.610654 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac2d5f8bcd6121f5a547113ae9bd48ade41b32629f2cc418f634276f2739b929"} err="failed to get container status \"ac2d5f8bcd6121f5a547113ae9bd48ade41b32629f2cc418f634276f2739b929\": rpc error: code = NotFound desc = could not find container \"ac2d5f8bcd6121f5a547113ae9bd48ade41b32629f2cc418f634276f2739b929\": container with ID starting with ac2d5f8bcd6121f5a547113ae9bd48ade41b32629f2cc418f634276f2739b929 not found: ID does not exist" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.619263 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 05 12:46:45 crc kubenswrapper[4784]: E1205 12:46:45.619946 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d5f4a13-fb6a-4e24-940d-52e60d0c73f8" containerName="cinder-api-log" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.620021 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d5f4a13-fb6a-4e24-940d-52e60d0c73f8" containerName="cinder-api-log" Dec 05 12:46:45 crc kubenswrapper[4784]: E1205 12:46:45.620118 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d5f4a13-fb6a-4e24-940d-52e60d0c73f8" containerName="cinder-api" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.620224 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d5f4a13-fb6a-4e24-940d-52e60d0c73f8" containerName="cinder-api" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.620533 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d5f4a13-fb6a-4e24-940d-52e60d0c73f8" containerName="cinder-api-log" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.620636 4784 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="7d5f4a13-fb6a-4e24-940d-52e60d0c73f8" containerName="cinder-api" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.621938 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.626842 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.627153 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.627274 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.627289 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.765956 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/630347d3-27c8-4ef8-8bc4-f06ff57474ed-config-data-custom\") pod \"cinder-api-0\" (UID: \"630347d3-27c8-4ef8-8bc4-f06ff57474ed\") " pod="openstack/cinder-api-0" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.766213 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/630347d3-27c8-4ef8-8bc4-f06ff57474ed-etc-machine-id\") pod \"cinder-api-0\" (UID: \"630347d3-27c8-4ef8-8bc4-f06ff57474ed\") " pod="openstack/cinder-api-0" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.766240 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/630347d3-27c8-4ef8-8bc4-f06ff57474ed-config-data\") pod \"cinder-api-0\" (UID: \"630347d3-27c8-4ef8-8bc4-f06ff57474ed\") " pod="openstack/cinder-api-0" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.766266 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/630347d3-27c8-4ef8-8bc4-f06ff57474ed-scripts\") pod \"cinder-api-0\" (UID: \"630347d3-27c8-4ef8-8bc4-f06ff57474ed\") " pod="openstack/cinder-api-0" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.766316 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/630347d3-27c8-4ef8-8bc4-f06ff57474ed-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"630347d3-27c8-4ef8-8bc4-f06ff57474ed\") " pod="openstack/cinder-api-0" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.766458 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/630347d3-27c8-4ef8-8bc4-f06ff57474ed-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"630347d3-27c8-4ef8-8bc4-f06ff57474ed\") " pod="openstack/cinder-api-0" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.766675 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/630347d3-27c8-4ef8-8bc4-f06ff57474ed-public-tls-certs\") pod \"cinder-api-0\" (UID: \"630347d3-27c8-4ef8-8bc4-f06ff57474ed\") " pod="openstack/cinder-api-0" Dec 05 12:46:45 crc 
kubenswrapper[4784]: I1205 12:46:45.766791 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/630347d3-27c8-4ef8-8bc4-f06ff57474ed-logs\") pod \"cinder-api-0\" (UID: \"630347d3-27c8-4ef8-8bc4-f06ff57474ed\") " pod="openstack/cinder-api-0" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.766963 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cn2rs\" (UniqueName: \"kubernetes.io/projected/630347d3-27c8-4ef8-8bc4-f06ff57474ed-kube-api-access-cn2rs\") pod \"cinder-api-0\" (UID: \"630347d3-27c8-4ef8-8bc4-f06ff57474ed\") " pod="openstack/cinder-api-0" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.868841 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/630347d3-27c8-4ef8-8bc4-f06ff57474ed-etc-machine-id\") pod \"cinder-api-0\" (UID: \"630347d3-27c8-4ef8-8bc4-f06ff57474ed\") " pod="openstack/cinder-api-0" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.868903 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/630347d3-27c8-4ef8-8bc4-f06ff57474ed-config-data\") pod \"cinder-api-0\" (UID: \"630347d3-27c8-4ef8-8bc4-f06ff57474ed\") " pod="openstack/cinder-api-0" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.868930 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/630347d3-27c8-4ef8-8bc4-f06ff57474ed-scripts\") pod \"cinder-api-0\" (UID: \"630347d3-27c8-4ef8-8bc4-f06ff57474ed\") " pod="openstack/cinder-api-0" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.868956 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/630347d3-27c8-4ef8-8bc4-f06ff57474ed-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"630347d3-27c8-4ef8-8bc4-f06ff57474ed\") " pod="openstack/cinder-api-0" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.869026 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/630347d3-27c8-4ef8-8bc4-f06ff57474ed-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"630347d3-27c8-4ef8-8bc4-f06ff57474ed\") " pod="openstack/cinder-api-0" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.869071 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/630347d3-27c8-4ef8-8bc4-f06ff57474ed-public-tls-certs\") pod \"cinder-api-0\" (UID: \"630347d3-27c8-4ef8-8bc4-f06ff57474ed\") " pod="openstack/cinder-api-0" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.869097 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/630347d3-27c8-4ef8-8bc4-f06ff57474ed-logs\") pod \"cinder-api-0\" (UID: \"630347d3-27c8-4ef8-8bc4-f06ff57474ed\") " pod="openstack/cinder-api-0" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.869158 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cn2rs\" (UniqueName: \"kubernetes.io/projected/630347d3-27c8-4ef8-8bc4-f06ff57474ed-kube-api-access-cn2rs\") pod \"cinder-api-0\" (UID: \"630347d3-27c8-4ef8-8bc4-f06ff57474ed\") " 
pod="openstack/cinder-api-0" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.869229 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/630347d3-27c8-4ef8-8bc4-f06ff57474ed-config-data-custom\") pod \"cinder-api-0\" (UID: \"630347d3-27c8-4ef8-8bc4-f06ff57474ed\") " pod="openstack/cinder-api-0" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.872550 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/630347d3-27c8-4ef8-8bc4-f06ff57474ed-etc-machine-id\") pod \"cinder-api-0\" (UID: \"630347d3-27c8-4ef8-8bc4-f06ff57474ed\") " pod="openstack/cinder-api-0" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.872565 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/630347d3-27c8-4ef8-8bc4-f06ff57474ed-logs\") pod \"cinder-api-0\" (UID: \"630347d3-27c8-4ef8-8bc4-f06ff57474ed\") " pod="openstack/cinder-api-0" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.884161 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/630347d3-27c8-4ef8-8bc4-f06ff57474ed-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"630347d3-27c8-4ef8-8bc4-f06ff57474ed\") " pod="openstack/cinder-api-0" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.884813 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/630347d3-27c8-4ef8-8bc4-f06ff57474ed-scripts\") pod \"cinder-api-0\" (UID: \"630347d3-27c8-4ef8-8bc4-f06ff57474ed\") " pod="openstack/cinder-api-0" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.888834 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/630347d3-27c8-4ef8-8bc4-f06ff57474ed-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"630347d3-27c8-4ef8-8bc4-f06ff57474ed\") " pod="openstack/cinder-api-0" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.890220 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/630347d3-27c8-4ef8-8bc4-f06ff57474ed-config-data-custom\") pod \"cinder-api-0\" (UID: \"630347d3-27c8-4ef8-8bc4-f06ff57474ed\") " pod="openstack/cinder-api-0" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.892814 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cn2rs\" (UniqueName: \"kubernetes.io/projected/630347d3-27c8-4ef8-8bc4-f06ff57474ed-kube-api-access-cn2rs\") pod \"cinder-api-0\" (UID: \"630347d3-27c8-4ef8-8bc4-f06ff57474ed\") " pod="openstack/cinder-api-0" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.894062 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/630347d3-27c8-4ef8-8bc4-f06ff57474ed-public-tls-certs\") pod \"cinder-api-0\" (UID: \"630347d3-27c8-4ef8-8bc4-f06ff57474ed\") " pod="openstack/cinder-api-0" Dec 05 12:46:45 crc kubenswrapper[4784]: I1205 12:46:45.898264 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/630347d3-27c8-4ef8-8bc4-f06ff57474ed-config-data\") pod \"cinder-api-0\" (UID: \"630347d3-27c8-4ef8-8bc4-f06ff57474ed\") " pod="openstack/cinder-api-0" Dec 05 12:46:45 crc 
kubenswrapper[4784]: I1205 12:46:45.956646 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 12:46:46 crc kubenswrapper[4784]: I1205 12:46:46.006372 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-6bc87b8895-m5b7r" Dec 05 12:46:46 crc kubenswrapper[4784]: I1205 12:46:46.075373 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-56d56698b8-d88q6"] Dec 05 12:46:46 crc kubenswrapper[4784]: I1205 12:46:46.075954 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-56d56698b8-d88q6" podUID="11bdb484-f2ce-4363-a365-c6fa7a15d4ad" containerName="neutron-httpd" containerID="cri-o://4a5c7f44debaf13680fb6230e39d2ed7ba59cfd6b23fc1026cbf066d344a6003" gracePeriod=30 Dec 05 12:46:46 crc kubenswrapper[4784]: I1205 12:46:46.075644 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-56d56698b8-d88q6" podUID="11bdb484-f2ce-4363-a365-c6fa7a15d4ad" containerName="neutron-api" containerID="cri-o://96467bc389028b09dc49f48cd3bfca18326fc12165a5b3523bbaff001ad5c886" gracePeriod=30 Dec 05 12:46:46 crc kubenswrapper[4784]: I1205 12:46:46.474131 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 12:46:46 crc kubenswrapper[4784]: I1205 12:46:46.547990 4784 generic.go:334] "Generic (PLEG): container finished" podID="9ce21c3e-07a5-4404-827e-367acaba9d66" containerID="1877b82f9ddf0bdc85406d9005c4d441a13f9f4235a6c129d7cfe7724ccfd4e3" exitCode=1 Dec 05 12:46:46 crc kubenswrapper[4784]: I1205 12:46:46.548074 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"9ce21c3e-07a5-4404-827e-367acaba9d66","Type":"ContainerDied","Data":"1877b82f9ddf0bdc85406d9005c4d441a13f9f4235a6c129d7cfe7724ccfd4e3"} Dec 05 12:46:46 crc kubenswrapper[4784]: I1205 12:46:46.548115 4784 scope.go:117] "RemoveContainer" containerID="21cd6441ce99a6a9c28798369a643ebcea4cfc755457448e96eeb1474d186087" Dec 05 12:46:46 crc kubenswrapper[4784]: I1205 12:46:46.548822 4784 scope.go:117] "RemoveContainer" containerID="1877b82f9ddf0bdc85406d9005c4d441a13f9f4235a6c129d7cfe7724ccfd4e3" Dec 05 12:46:46 crc kubenswrapper[4784]: E1205 12:46:46.549070 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 10s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(9ce21c3e-07a5-4404-827e-367acaba9d66)\"" pod="openstack/watcher-decision-engine-0" podUID="9ce21c3e-07a5-4404-827e-367acaba9d66" Dec 05 12:46:46 crc kubenswrapper[4784]: I1205 12:46:46.555116 4784 generic.go:334] "Generic (PLEG): container finished" podID="11bdb484-f2ce-4363-a365-c6fa7a15d4ad" containerID="4a5c7f44debaf13680fb6230e39d2ed7ba59cfd6b23fc1026cbf066d344a6003" exitCode=0 Dec 05 12:46:46 crc kubenswrapper[4784]: I1205 12:46:46.555178 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-56d56698b8-d88q6" event={"ID":"11bdb484-f2ce-4363-a365-c6fa7a15d4ad","Type":"ContainerDied","Data":"4a5c7f44debaf13680fb6230e39d2ed7ba59cfd6b23fc1026cbf066d344a6003"} Dec 05 12:46:46 crc kubenswrapper[4784]: I1205 12:46:46.563143 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"f62acf64-20e7-4e17-be2d-640a38de004f","Type":"ContainerStarted","Data":"74ce7688adb1a066cff1d530118bbaf3c08402ad14706e908d713e3e74dabbf0"} Dec 05 12:46:46 crc kubenswrapper[4784]: I1205 12:46:46.599616 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7b64cff454-t47d4" event={"ID":"20cab8a1-167e-4a61-9de1-dbca99fc6978","Type":"ContainerStarted","Data":"34c9c2bc8fde3390c643eca99e0b7276e01e3b08d98af77fb7050434707ac19f"} Dec 05 12:46:46 crc kubenswrapper[4784]: I1205 12:46:46.599897 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7b64cff454-t47d4" Dec 05 12:46:46 crc kubenswrapper[4784]: I1205 12:46:46.599947 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7b64cff454-t47d4" Dec 05 12:46:46 crc kubenswrapper[4784]: I1205 12:46:46.614363 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"630347d3-27c8-4ef8-8bc4-f06ff57474ed","Type":"ContainerStarted","Data":"f54c183f5c08c2f514e2ae062f039d3fe5ae322a1b5234c50306896dfb6fcf5b"} Dec 05 12:46:46 crc kubenswrapper[4784]: I1205 12:46:46.628692 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-7b64cff454-t47d4" podStartSLOduration=3.628674808 podStartE2EDuration="3.628674808s" podCreationTimestamp="2025-12-05 12:46:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:46:46.625146088 +0000 UTC m=+1286.045212923" watchObservedRunningTime="2025-12-05 12:46:46.628674808 +0000 UTC m=+1286.048741623" Dec 05 12:46:47 crc kubenswrapper[4784]: I1205 12:46:47.012399 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7d5f4a13-fb6a-4e24-940d-52e60d0c73f8" path="/var/lib/kubelet/pods/7d5f4a13-fb6a-4e24-940d-52e60d0c73f8/volumes" Dec 05 12:46:47 crc kubenswrapper[4784]: I1205 12:46:47.630288 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"630347d3-27c8-4ef8-8bc4-f06ff57474ed","Type":"ContainerStarted","Data":"f9c664f94bf618c68fc539d3d887755e829d28e84f090c5e190775641f1a02b0"} Dec 05 12:46:48 crc kubenswrapper[4784]: I1205 12:46:48.651266 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"630347d3-27c8-4ef8-8bc4-f06ff57474ed","Type":"ContainerStarted","Data":"9417af0c9fc9b84b42f6a28529b5e807a42fb6b9e6f2e7d4c86b0065a3a159a5"} Dec 05 12:46:48 crc kubenswrapper[4784]: I1205 12:46:48.655373 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 05 12:46:48 crc kubenswrapper[4784]: I1205 12:46:48.662127 4784 generic.go:334] "Generic (PLEG): container finished" podID="11bdb484-f2ce-4363-a365-c6fa7a15d4ad" containerID="96467bc389028b09dc49f48cd3bfca18326fc12165a5b3523bbaff001ad5c886" exitCode=0 Dec 05 12:46:48 crc kubenswrapper[4784]: I1205 12:46:48.662272 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-56d56698b8-d88q6" event={"ID":"11bdb484-f2ce-4363-a365-c6fa7a15d4ad","Type":"ContainerDied","Data":"96467bc389028b09dc49f48cd3bfca18326fc12165a5b3523bbaff001ad5c886"} Dec 05 12:46:48 crc kubenswrapper[4784]: I1205 12:46:48.665786 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"f62acf64-20e7-4e17-be2d-640a38de004f","Type":"ContainerStarted","Data":"1acdd12e0d5530729e4cc92bc7b9d0f3f0ac72d205f65877bdbe09482a1361bb"} Dec 05 12:46:48 crc kubenswrapper[4784]: I1205 12:46:48.665897 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 12:46:48 crc kubenswrapper[4784]: I1205 12:46:48.686819 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.686794468 podStartE2EDuration="3.686794468s" podCreationTimestamp="2025-12-05 12:46:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:46:48.671539792 +0000 UTC m=+1288.091606607" watchObservedRunningTime="2025-12-05 12:46:48.686794468 +0000 UTC m=+1288.106861273" Dec 05 12:46:48 crc kubenswrapper[4784]: I1205 12:46:48.714480 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.782933952 podStartE2EDuration="6.71445437s" podCreationTimestamp="2025-12-05 12:46:42 +0000 UTC" firstStartedPulling="2025-12-05 12:46:43.952800482 +0000 UTC m=+1283.372867297" lastFinishedPulling="2025-12-05 12:46:47.88432087 +0000 UTC m=+1287.304387715" observedRunningTime="2025-12-05 12:46:48.701072053 +0000 UTC m=+1288.121138868" watchObservedRunningTime="2025-12-05 12:46:48.71445437 +0000 UTC m=+1288.134521185" Dec 05 12:46:48 crc kubenswrapper[4784]: I1205 12:46:48.745369 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-c688f947-l56ns" Dec 05 12:46:48 crc kubenswrapper[4784]: I1205 12:46:48.844650 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7f66b8c67-hfrbq"] Dec 05 12:46:48 crc kubenswrapper[4784]: I1205 12:46:48.844872 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7f66b8c67-hfrbq" podUID="1e1035a9-06b4-47c8-8781-e46a35a2f3c9" containerName="dnsmasq-dns" containerID="cri-o://c98a4221776a428967d5ebf486d3f7cd495138092bcfa4577cf485f39075d63e" gracePeriod=10 Dec 05 12:46:48 crc kubenswrapper[4784]: I1205 12:46:48.856057 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-56d56698b8-d88q6" Dec 05 12:46:48 crc kubenswrapper[4784]: I1205 12:46:48.869431 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 05 12:46:48 crc kubenswrapper[4784]: I1205 12:46:48.981983 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.056780 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/11bdb484-f2ce-4363-a365-c6fa7a15d4ad-config\") pod \"11bdb484-f2ce-4363-a365-c6fa7a15d4ad\" (UID: \"11bdb484-f2ce-4363-a365-c6fa7a15d4ad\") " Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.056832 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/11bdb484-f2ce-4363-a365-c6fa7a15d4ad-ovndb-tls-certs\") pod \"11bdb484-f2ce-4363-a365-c6fa7a15d4ad\" (UID: \"11bdb484-f2ce-4363-a365-c6fa7a15d4ad\") " Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.056918 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q9x6d\" (UniqueName: \"kubernetes.io/projected/11bdb484-f2ce-4363-a365-c6fa7a15d4ad-kube-api-access-q9x6d\") pod \"11bdb484-f2ce-4363-a365-c6fa7a15d4ad\" (UID: \"11bdb484-f2ce-4363-a365-c6fa7a15d4ad\") " Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.056955 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/11bdb484-f2ce-4363-a365-c6fa7a15d4ad-httpd-config\") pod \"11bdb484-f2ce-4363-a365-c6fa7a15d4ad\" (UID: \"11bdb484-f2ce-4363-a365-c6fa7a15d4ad\") " Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.057156 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11bdb484-f2ce-4363-a365-c6fa7a15d4ad-combined-ca-bundle\") pod \"11bdb484-f2ce-4363-a365-c6fa7a15d4ad\" (UID: \"11bdb484-f2ce-4363-a365-c6fa7a15d4ad\") " Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.065893 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11bdb484-f2ce-4363-a365-c6fa7a15d4ad-kube-api-access-q9x6d" (OuterVolumeSpecName: "kube-api-access-q9x6d") pod "11bdb484-f2ce-4363-a365-c6fa7a15d4ad" (UID: "11bdb484-f2ce-4363-a365-c6fa7a15d4ad"). InnerVolumeSpecName "kube-api-access-q9x6d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.071713 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11bdb484-f2ce-4363-a365-c6fa7a15d4ad-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "11bdb484-f2ce-4363-a365-c6fa7a15d4ad" (UID: "11bdb484-f2ce-4363-a365-c6fa7a15d4ad"). InnerVolumeSpecName "httpd-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.161467 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q9x6d\" (UniqueName: \"kubernetes.io/projected/11bdb484-f2ce-4363-a365-c6fa7a15d4ad-kube-api-access-q9x6d\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.161496 4784 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/11bdb484-f2ce-4363-a365-c6fa7a15d4ad-httpd-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.170163 4784 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7f66b8c67-hfrbq" podUID="1e1035a9-06b4-47c8-8781-e46a35a2f3c9" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.165:5353: connect: connection refused" Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.200672 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11bdb484-f2ce-4363-a365-c6fa7a15d4ad-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "11bdb484-f2ce-4363-a365-c6fa7a15d4ad" (UID: "11bdb484-f2ce-4363-a365-c6fa7a15d4ad"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.258343 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11bdb484-f2ce-4363-a365-c6fa7a15d4ad-config" (OuterVolumeSpecName: "config") pod "11bdb484-f2ce-4363-a365-c6fa7a15d4ad" (UID: "11bdb484-f2ce-4363-a365-c6fa7a15d4ad"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.267467 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11bdb484-f2ce-4363-a365-c6fa7a15d4ad-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.267510 4784 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/11bdb484-f2ce-4363-a365-c6fa7a15d4ad-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.314326 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11bdb484-f2ce-4363-a365-c6fa7a15d4ad-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "11bdb484-f2ce-4363-a365-c6fa7a15d4ad" (UID: "11bdb484-f2ce-4363-a365-c6fa7a15d4ad"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.369536 4784 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/11bdb484-f2ce-4363-a365-c6fa7a15d4ad-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.498487 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5c8994b8f8-lr4cl" Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.547571 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7f66b8c67-hfrbq" Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.674946 4784 generic.go:334] "Generic (PLEG): container finished" podID="1e1035a9-06b4-47c8-8781-e46a35a2f3c9" containerID="c98a4221776a428967d5ebf486d3f7cd495138092bcfa4577cf485f39075d63e" exitCode=0 Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.674986 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f66b8c67-hfrbq" Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.675024 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f66b8c67-hfrbq" event={"ID":"1e1035a9-06b4-47c8-8781-e46a35a2f3c9","Type":"ContainerDied","Data":"c98a4221776a428967d5ebf486d3f7cd495138092bcfa4577cf485f39075d63e"} Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.675076 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f66b8c67-hfrbq" event={"ID":"1e1035a9-06b4-47c8-8781-e46a35a2f3c9","Type":"ContainerDied","Data":"82057a35d9107d78e15475ddb5111920999bdf98b8bf5c8f2e2612b2fecefff6"} Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.675097 4784 scope.go:117] "RemoveContainer" containerID="c98a4221776a428967d5ebf486d3f7cd495138092bcfa4577cf485f39075d63e" Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.675252 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1e1035a9-06b4-47c8-8781-e46a35a2f3c9-ovsdbserver-nb\") pod \"1e1035a9-06b4-47c8-8781-e46a35a2f3c9\" (UID: \"1e1035a9-06b4-47c8-8781-e46a35a2f3c9\") " Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.675395 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e1035a9-06b4-47c8-8781-e46a35a2f3c9-config\") pod \"1e1035a9-06b4-47c8-8781-e46a35a2f3c9\" (UID: \"1e1035a9-06b4-47c8-8781-e46a35a2f3c9\") " Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.675779 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v69s6\" (UniqueName: \"kubernetes.io/projected/1e1035a9-06b4-47c8-8781-e46a35a2f3c9-kube-api-access-v69s6\") pod \"1e1035a9-06b4-47c8-8781-e46a35a2f3c9\" (UID: \"1e1035a9-06b4-47c8-8781-e46a35a2f3c9\") " Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.675813 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1e1035a9-06b4-47c8-8781-e46a35a2f3c9-ovsdbserver-sb\") pod \"1e1035a9-06b4-47c8-8781-e46a35a2f3c9\" (UID: \"1e1035a9-06b4-47c8-8781-e46a35a2f3c9\") " Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.675840 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1e1035a9-06b4-47c8-8781-e46a35a2f3c9-dns-svc\") pod \"1e1035a9-06b4-47c8-8781-e46a35a2f3c9\" (UID: \"1e1035a9-06b4-47c8-8781-e46a35a2f3c9\") " Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.681761 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-56d56698b8-d88q6" event={"ID":"11bdb484-f2ce-4363-a365-c6fa7a15d4ad","Type":"ContainerDied","Data":"79d1474f6ba6c8f33548dadd2f02a33de661b0d308161ea3797435004b6c7f1a"} Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.682015 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-56d56698b8-d88q6" Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.683108 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e" containerName="cinder-scheduler" containerID="cri-o://f6eec2743071ba7ac1b1d7ab9a27d25e0964ab1a8dfbb2d1f6f71d92a0402bbd" gracePeriod=30 Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.683283 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e" containerName="probe" containerID="cri-o://cd1af3671b6a033ab9b96d423079150fc10f5b063cd3b5401e93f8b094f1e25b" gracePeriod=30 Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.698692 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1e1035a9-06b4-47c8-8781-e46a35a2f3c9-kube-api-access-v69s6" (OuterVolumeSpecName: "kube-api-access-v69s6") pod "1e1035a9-06b4-47c8-8781-e46a35a2f3c9" (UID: "1e1035a9-06b4-47c8-8781-e46a35a2f3c9"). InnerVolumeSpecName "kube-api-access-v69s6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.705615 4784 scope.go:117] "RemoveContainer" containerID="f4f454c86824b1d011ca6fd6e190a8b67a6fdb135edfabeb882bf166142b9a39" Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.729826 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-56d56698b8-d88q6"] Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.743939 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1e1035a9-06b4-47c8-8781-e46a35a2f3c9-config" (OuterVolumeSpecName: "config") pod "1e1035a9-06b4-47c8-8781-e46a35a2f3c9" (UID: "1e1035a9-06b4-47c8-8781-e46a35a2f3c9"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.745505 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-56d56698b8-d88q6"] Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.745604 4784 scope.go:117] "RemoveContainer" containerID="c98a4221776a428967d5ebf486d3f7cd495138092bcfa4577cf485f39075d63e" Dec 05 12:46:49 crc kubenswrapper[4784]: E1205 12:46:49.746627 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c98a4221776a428967d5ebf486d3f7cd495138092bcfa4577cf485f39075d63e\": container with ID starting with c98a4221776a428967d5ebf486d3f7cd495138092bcfa4577cf485f39075d63e not found: ID does not exist" containerID="c98a4221776a428967d5ebf486d3f7cd495138092bcfa4577cf485f39075d63e" Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.746766 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c98a4221776a428967d5ebf486d3f7cd495138092bcfa4577cf485f39075d63e"} err="failed to get container status \"c98a4221776a428967d5ebf486d3f7cd495138092bcfa4577cf485f39075d63e\": rpc error: code = NotFound desc = could not find container \"c98a4221776a428967d5ebf486d3f7cd495138092bcfa4577cf485f39075d63e\": container with ID starting with c98a4221776a428967d5ebf486d3f7cd495138092bcfa4577cf485f39075d63e not found: ID does not exist" Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.746877 4784 scope.go:117] "RemoveContainer" containerID="f4f454c86824b1d011ca6fd6e190a8b67a6fdb135edfabeb882bf166142b9a39" Dec 05 12:46:49 crc kubenswrapper[4784]: E1205 12:46:49.747472 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f4f454c86824b1d011ca6fd6e190a8b67a6fdb135edfabeb882bf166142b9a39\": container with ID starting with f4f454c86824b1d011ca6fd6e190a8b67a6fdb135edfabeb882bf166142b9a39 not found: ID does not exist" containerID="f4f454c86824b1d011ca6fd6e190a8b67a6fdb135edfabeb882bf166142b9a39" Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.747573 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4f454c86824b1d011ca6fd6e190a8b67a6fdb135edfabeb882bf166142b9a39"} err="failed to get container status \"f4f454c86824b1d011ca6fd6e190a8b67a6fdb135edfabeb882bf166142b9a39\": rpc error: code = NotFound desc = could not find container \"f4f454c86824b1d011ca6fd6e190a8b67a6fdb135edfabeb882bf166142b9a39\": container with ID starting with f4f454c86824b1d011ca6fd6e190a8b67a6fdb135edfabeb882bf166142b9a39 not found: ID does not exist" Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.747656 4784 scope.go:117] "RemoveContainer" containerID="4a5c7f44debaf13680fb6230e39d2ed7ba59cfd6b23fc1026cbf066d344a6003" Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.758554 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1e1035a9-06b4-47c8-8781-e46a35a2f3c9-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "1e1035a9-06b4-47c8-8781-e46a35a2f3c9" (UID: "1e1035a9-06b4-47c8-8781-e46a35a2f3c9"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.759938 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1e1035a9-06b4-47c8-8781-e46a35a2f3c9-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "1e1035a9-06b4-47c8-8781-e46a35a2f3c9" (UID: "1e1035a9-06b4-47c8-8781-e46a35a2f3c9"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.770820 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5c8994b8f8-lr4cl" Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.776634 4784 scope.go:117] "RemoveContainer" containerID="96467bc389028b09dc49f48cd3bfca18326fc12165a5b3523bbaff001ad5c886" Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.777576 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1e1035a9-06b4-47c8-8781-e46a35a2f3c9-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1e1035a9-06b4-47c8-8781-e46a35a2f3c9" (UID: "1e1035a9-06b4-47c8-8781-e46a35a2f3c9"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.781260 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v69s6\" (UniqueName: \"kubernetes.io/projected/1e1035a9-06b4-47c8-8781-e46a35a2f3c9-kube-api-access-v69s6\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.781291 4784 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1e1035a9-06b4-47c8-8781-e46a35a2f3c9-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.781304 4784 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1e1035a9-06b4-47c8-8781-e46a35a2f3c9-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.781315 4784 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1e1035a9-06b4-47c8-8781-e46a35a2f3c9-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:49 crc kubenswrapper[4784]: I1205 12:46:49.781323 4784 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e1035a9-06b4-47c8-8781-e46a35a2f3c9-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:50 crc kubenswrapper[4784]: I1205 12:46:50.076727 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7f66b8c67-hfrbq"] Dec 05 12:46:50 crc kubenswrapper[4784]: I1205 12:46:50.085336 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7f66b8c67-hfrbq"] Dec 05 12:46:50 crc kubenswrapper[4784]: I1205 12:46:50.694322 4784 generic.go:334] "Generic (PLEG): container finished" podID="f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e" containerID="cd1af3671b6a033ab9b96d423079150fc10f5b063cd3b5401e93f8b094f1e25b" exitCode=0 Dec 05 12:46:50 crc kubenswrapper[4784]: I1205 12:46:50.694367 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e","Type":"ContainerDied","Data":"cd1af3671b6a033ab9b96d423079150fc10f5b063cd3b5401e93f8b094f1e25b"} Dec 05 12:46:51 crc kubenswrapper[4784]: I1205 12:46:51.012936 
4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="11bdb484-f2ce-4363-a365-c6fa7a15d4ad" path="/var/lib/kubelet/pods/11bdb484-f2ce-4363-a365-c6fa7a15d4ad/volumes" Dec 05 12:46:51 crc kubenswrapper[4784]: I1205 12:46:51.014152 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1e1035a9-06b4-47c8-8781-e46a35a2f3c9" path="/var/lib/kubelet/pods/1e1035a9-06b4-47c8-8781-e46a35a2f3c9/volumes" Dec 05 12:46:51 crc kubenswrapper[4784]: I1205 12:46:51.164243 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 12:46:51 crc kubenswrapper[4784]: I1205 12:46:51.246127 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 12:46:51 crc kubenswrapper[4784]: I1205 12:46:51.708078 4784 generic.go:334] "Generic (PLEG): container finished" podID="f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e" containerID="f6eec2743071ba7ac1b1d7ab9a27d25e0964ab1a8dfbb2d1f6f71d92a0402bbd" exitCode=0 Dec 05 12:46:51 crc kubenswrapper[4784]: I1205 12:46:51.708231 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e","Type":"ContainerDied","Data":"f6eec2743071ba7ac1b1d7ab9a27d25e0964ab1a8dfbb2d1f6f71d92a0402bbd"} Dec 05 12:46:51 crc kubenswrapper[4784]: I1205 12:46:51.708730 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e","Type":"ContainerDied","Data":"dd858e92d15fd7b17bad3eeb55751db80cef85cff247ed9f8a08b5da4c6cf045"} Dec 05 12:46:51 crc kubenswrapper[4784]: I1205 12:46:51.708744 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dd858e92d15fd7b17bad3eeb55751db80cef85cff247ed9f8a08b5da4c6cf045" Dec 05 12:46:51 crc kubenswrapper[4784]: I1205 12:46:51.744408 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 12:46:51 crc kubenswrapper[4784]: I1205 12:46:51.832231 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e-combined-ca-bundle\") pod \"f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e\" (UID: \"f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e\") " Dec 05 12:46:51 crc kubenswrapper[4784]: I1205 12:46:51.832293 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-krt8r\" (UniqueName: \"kubernetes.io/projected/f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e-kube-api-access-krt8r\") pod \"f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e\" (UID: \"f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e\") " Dec 05 12:46:51 crc kubenswrapper[4784]: I1205 12:46:51.832400 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e-scripts\") pod \"f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e\" (UID: \"f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e\") " Dec 05 12:46:51 crc kubenswrapper[4784]: I1205 12:46:51.832514 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e-config-data-custom\") pod \"f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e\" (UID: \"f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e\") " Dec 05 12:46:51 crc kubenswrapper[4784]: I1205 12:46:51.832544 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e-config-data\") pod \"f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e\" (UID: \"f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e\") " Dec 05 12:46:51 crc kubenswrapper[4784]: I1205 12:46:51.832587 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e-etc-machine-id\") pod \"f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e\" (UID: \"f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e\") " Dec 05 12:46:51 crc kubenswrapper[4784]: I1205 12:46:51.832971 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e" (UID: "f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 12:46:51 crc kubenswrapper[4784]: I1205 12:46:51.838360 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e-scripts" (OuterVolumeSpecName: "scripts") pod "f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e" (UID: "f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:46:51 crc kubenswrapper[4784]: I1205 12:46:51.840304 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e-kube-api-access-krt8r" (OuterVolumeSpecName: "kube-api-access-krt8r") pod "f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e" (UID: "f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e"). InnerVolumeSpecName "kube-api-access-krt8r". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:46:51 crc kubenswrapper[4784]: I1205 12:46:51.842856 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e" (UID: "f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:46:51 crc kubenswrapper[4784]: I1205 12:46:51.900421 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e" (UID: "f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:46:51 crc kubenswrapper[4784]: I1205 12:46:51.938442 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:51 crc kubenswrapper[4784]: I1205 12:46:51.938473 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-krt8r\" (UniqueName: \"kubernetes.io/projected/f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e-kube-api-access-krt8r\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:51 crc kubenswrapper[4784]: I1205 12:46:51.938484 4784 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:51 crc kubenswrapper[4784]: I1205 12:46:51.938493 4784 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:51 crc kubenswrapper[4784]: I1205 12:46:51.938504 4784 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:52 crc kubenswrapper[4784]: I1205 12:46:52.002095 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e-config-data" (OuterVolumeSpecName: "config-data") pod "f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e" (UID: "f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:46:52 crc kubenswrapper[4784]: I1205 12:46:52.040854 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:52 crc kubenswrapper[4784]: I1205 12:46:52.312417 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Dec 05 12:46:52 crc kubenswrapper[4784]: I1205 12:46:52.312491 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Dec 05 12:46:52 crc kubenswrapper[4784]: I1205 12:46:52.313441 4784 scope.go:117] "RemoveContainer" containerID="1877b82f9ddf0bdc85406d9005c4d441a13f9f4235a6c129d7cfe7724ccfd4e3" Dec 05 12:46:52 crc kubenswrapper[4784]: E1205 12:46:52.313942 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 10s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(9ce21c3e-07a5-4404-827e-367acaba9d66)\"" pod="openstack/watcher-decision-engine-0" podUID="9ce21c3e-07a5-4404-827e-367acaba9d66" Dec 05 12:46:52 crc kubenswrapper[4784]: I1205 12:46:52.716218 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 12:46:52 crc kubenswrapper[4784]: I1205 12:46:52.752533 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 12:46:52 crc kubenswrapper[4784]: I1205 12:46:52.765769 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 12:46:52 crc kubenswrapper[4784]: I1205 12:46:52.782808 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 12:46:52 crc kubenswrapper[4784]: E1205 12:46:52.783327 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e1035a9-06b4-47c8-8781-e46a35a2f3c9" containerName="init" Dec 05 12:46:52 crc kubenswrapper[4784]: I1205 12:46:52.783354 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e1035a9-06b4-47c8-8781-e46a35a2f3c9" containerName="init" Dec 05 12:46:52 crc kubenswrapper[4784]: E1205 12:46:52.783378 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e1035a9-06b4-47c8-8781-e46a35a2f3c9" containerName="dnsmasq-dns" Dec 05 12:46:52 crc kubenswrapper[4784]: I1205 12:46:52.783410 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e1035a9-06b4-47c8-8781-e46a35a2f3c9" containerName="dnsmasq-dns" Dec 05 12:46:52 crc kubenswrapper[4784]: E1205 12:46:52.783433 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11bdb484-f2ce-4363-a365-c6fa7a15d4ad" containerName="neutron-api" Dec 05 12:46:52 crc kubenswrapper[4784]: I1205 12:46:52.783442 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="11bdb484-f2ce-4363-a365-c6fa7a15d4ad" containerName="neutron-api" Dec 05 12:46:52 crc kubenswrapper[4784]: E1205 12:46:52.783466 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e" containerName="probe" Dec 05 12:46:52 crc kubenswrapper[4784]: I1205 12:46:52.783474 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e" containerName="probe" Dec 05 12:46:52 crc kubenswrapper[4784]: E1205 12:46:52.783489 4784 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e" containerName="cinder-scheduler" Dec 05 12:46:52 crc kubenswrapper[4784]: I1205 12:46:52.783497 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e" containerName="cinder-scheduler" Dec 05 12:46:52 crc kubenswrapper[4784]: E1205 12:46:52.783519 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11bdb484-f2ce-4363-a365-c6fa7a15d4ad" containerName="neutron-httpd" Dec 05 12:46:52 crc kubenswrapper[4784]: I1205 12:46:52.783529 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="11bdb484-f2ce-4363-a365-c6fa7a15d4ad" containerName="neutron-httpd" Dec 05 12:46:52 crc kubenswrapper[4784]: I1205 12:46:52.783785 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e1035a9-06b4-47c8-8781-e46a35a2f3c9" containerName="dnsmasq-dns" Dec 05 12:46:52 crc kubenswrapper[4784]: I1205 12:46:52.783805 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="11bdb484-f2ce-4363-a365-c6fa7a15d4ad" containerName="neutron-httpd" Dec 05 12:46:52 crc kubenswrapper[4784]: I1205 12:46:52.783815 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e" containerName="cinder-scheduler" Dec 05 12:46:52 crc kubenswrapper[4784]: I1205 12:46:52.783835 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e" containerName="probe" Dec 05 12:46:52 crc kubenswrapper[4784]: I1205 12:46:52.783851 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="11bdb484-f2ce-4363-a365-c6fa7a15d4ad" containerName="neutron-api" Dec 05 12:46:52 crc kubenswrapper[4784]: I1205 12:46:52.785615 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 12:46:52 crc kubenswrapper[4784]: I1205 12:46:52.790753 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 05 12:46:52 crc kubenswrapper[4784]: I1205 12:46:52.796083 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 12:46:52 crc kubenswrapper[4784]: I1205 12:46:52.962904 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7543eb24-2b15-498b-b447-9f1f47fef1f0-config-data\") pod \"cinder-scheduler-0\" (UID: \"7543eb24-2b15-498b-b447-9f1f47fef1f0\") " pod="openstack/cinder-scheduler-0" Dec 05 12:46:52 crc kubenswrapper[4784]: I1205 12:46:52.962983 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7543eb24-2b15-498b-b447-9f1f47fef1f0-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"7543eb24-2b15-498b-b447-9f1f47fef1f0\") " pod="openstack/cinder-scheduler-0" Dec 05 12:46:52 crc kubenswrapper[4784]: I1205 12:46:52.963022 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7543eb24-2b15-498b-b447-9f1f47fef1f0-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"7543eb24-2b15-498b-b447-9f1f47fef1f0\") " pod="openstack/cinder-scheduler-0" Dec 05 12:46:52 crc kubenswrapper[4784]: I1205 12:46:52.963068 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7543eb24-2b15-498b-b447-9f1f47fef1f0-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"7543eb24-2b15-498b-b447-9f1f47fef1f0\") " pod="openstack/cinder-scheduler-0" Dec 05 12:46:52 crc kubenswrapper[4784]: I1205 12:46:52.963252 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7543eb24-2b15-498b-b447-9f1f47fef1f0-scripts\") pod \"cinder-scheduler-0\" (UID: \"7543eb24-2b15-498b-b447-9f1f47fef1f0\") " pod="openstack/cinder-scheduler-0" Dec 05 12:46:52 crc kubenswrapper[4784]: I1205 12:46:52.963339 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dsp6w\" (UniqueName: \"kubernetes.io/projected/7543eb24-2b15-498b-b447-9f1f47fef1f0-kube-api-access-dsp6w\") pod \"cinder-scheduler-0\" (UID: \"7543eb24-2b15-498b-b447-9f1f47fef1f0\") " pod="openstack/cinder-scheduler-0" Dec 05 12:46:53 crc kubenswrapper[4784]: I1205 12:46:53.011720 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e" path="/var/lib/kubelet/pods/f1815ba7-4bb6-4aa4-af3a-3d40a9731c5e/volumes" Dec 05 12:46:53 crc kubenswrapper[4784]: I1205 12:46:53.065665 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7543eb24-2b15-498b-b447-9f1f47fef1f0-scripts\") pod \"cinder-scheduler-0\" (UID: \"7543eb24-2b15-498b-b447-9f1f47fef1f0\") " pod="openstack/cinder-scheduler-0" Dec 05 12:46:53 crc kubenswrapper[4784]: I1205 12:46:53.065718 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dsp6w\" (UniqueName: 
\"kubernetes.io/projected/7543eb24-2b15-498b-b447-9f1f47fef1f0-kube-api-access-dsp6w\") pod \"cinder-scheduler-0\" (UID: \"7543eb24-2b15-498b-b447-9f1f47fef1f0\") " pod="openstack/cinder-scheduler-0" Dec 05 12:46:53 crc kubenswrapper[4784]: I1205 12:46:53.065816 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7543eb24-2b15-498b-b447-9f1f47fef1f0-config-data\") pod \"cinder-scheduler-0\" (UID: \"7543eb24-2b15-498b-b447-9f1f47fef1f0\") " pod="openstack/cinder-scheduler-0" Dec 05 12:46:53 crc kubenswrapper[4784]: I1205 12:46:53.065876 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7543eb24-2b15-498b-b447-9f1f47fef1f0-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"7543eb24-2b15-498b-b447-9f1f47fef1f0\") " pod="openstack/cinder-scheduler-0" Dec 05 12:46:53 crc kubenswrapper[4784]: I1205 12:46:53.065911 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7543eb24-2b15-498b-b447-9f1f47fef1f0-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"7543eb24-2b15-498b-b447-9f1f47fef1f0\") " pod="openstack/cinder-scheduler-0" Dec 05 12:46:53 crc kubenswrapper[4784]: I1205 12:46:53.065932 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7543eb24-2b15-498b-b447-9f1f47fef1f0-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"7543eb24-2b15-498b-b447-9f1f47fef1f0\") " pod="openstack/cinder-scheduler-0" Dec 05 12:46:53 crc kubenswrapper[4784]: I1205 12:46:53.066054 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7543eb24-2b15-498b-b447-9f1f47fef1f0-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"7543eb24-2b15-498b-b447-9f1f47fef1f0\") " pod="openstack/cinder-scheduler-0" Dec 05 12:46:53 crc kubenswrapper[4784]: I1205 12:46:53.071600 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7543eb24-2b15-498b-b447-9f1f47fef1f0-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"7543eb24-2b15-498b-b447-9f1f47fef1f0\") " pod="openstack/cinder-scheduler-0" Dec 05 12:46:53 crc kubenswrapper[4784]: I1205 12:46:53.071671 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7543eb24-2b15-498b-b447-9f1f47fef1f0-scripts\") pod \"cinder-scheduler-0\" (UID: \"7543eb24-2b15-498b-b447-9f1f47fef1f0\") " pod="openstack/cinder-scheduler-0" Dec 05 12:46:53 crc kubenswrapper[4784]: I1205 12:46:53.071742 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7543eb24-2b15-498b-b447-9f1f47fef1f0-config-data\") pod \"cinder-scheduler-0\" (UID: \"7543eb24-2b15-498b-b447-9f1f47fef1f0\") " pod="openstack/cinder-scheduler-0" Dec 05 12:46:53 crc kubenswrapper[4784]: I1205 12:46:53.072678 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7543eb24-2b15-498b-b447-9f1f47fef1f0-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"7543eb24-2b15-498b-b447-9f1f47fef1f0\") " pod="openstack/cinder-scheduler-0" Dec 05 12:46:53 crc kubenswrapper[4784]: I1205 12:46:53.088178 4784 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dsp6w\" (UniqueName: \"kubernetes.io/projected/7543eb24-2b15-498b-b447-9f1f47fef1f0-kube-api-access-dsp6w\") pod \"cinder-scheduler-0\" (UID: \"7543eb24-2b15-498b-b447-9f1f47fef1f0\") " pod="openstack/cinder-scheduler-0" Dec 05 12:46:53 crc kubenswrapper[4784]: I1205 12:46:53.107287 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 12:46:53 crc kubenswrapper[4784]: I1205 12:46:53.568139 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 12:46:53 crc kubenswrapper[4784]: I1205 12:46:53.747613 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7543eb24-2b15-498b-b447-9f1f47fef1f0","Type":"ContainerStarted","Data":"d0905d54b08dd7848d44674b9e2dba2624cc034335bda993427481e80da47e7f"} Dec 05 12:46:54 crc kubenswrapper[4784]: I1205 12:46:54.760932 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7543eb24-2b15-498b-b447-9f1f47fef1f0","Type":"ContainerStarted","Data":"4c85e17ef532e226910c9d181d087720f541f77449a54b71e5c211d3aeff14f2"} Dec 05 12:46:55 crc kubenswrapper[4784]: I1205 12:46:55.709243 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7b64cff454-t47d4" Dec 05 12:46:55 crc kubenswrapper[4784]: I1205 12:46:55.786921 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7543eb24-2b15-498b-b447-9f1f47fef1f0","Type":"ContainerStarted","Data":"a29bbf7d961be5d133d32287d8a7b43c99c2be5e1d3ef7ce4ca745541a366e49"} Dec 05 12:46:55 crc kubenswrapper[4784]: I1205 12:46:55.814027 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.814004055 podStartE2EDuration="3.814004055s" podCreationTimestamp="2025-12-05 12:46:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:46:55.807088009 +0000 UTC m=+1295.227154824" watchObservedRunningTime="2025-12-05 12:46:55.814004055 +0000 UTC m=+1295.234070870" Dec 05 12:46:55 crc kubenswrapper[4784]: I1205 12:46:55.850286 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7b64cff454-t47d4" Dec 05 12:46:55 crc kubenswrapper[4784]: I1205 12:46:55.901912 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5c8994b8f8-lr4cl"] Dec 05 12:46:55 crc kubenswrapper[4784]: I1205 12:46:55.902205 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5c8994b8f8-lr4cl" podUID="dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc" containerName="barbican-api-log" containerID="cri-o://766f9f440ff6ea0804d67f4f60e4fc74d51342cea217159a1984116133a23438" gracePeriod=30 Dec 05 12:46:55 crc kubenswrapper[4784]: I1205 12:46:55.902303 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5c8994b8f8-lr4cl" podUID="dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc" containerName="barbican-api" containerID="cri-o://2ca89ad316719941d6e5e0487dc6ed12df2ec0958fec84a3e10e6e022e53d93e" gracePeriod=30 Dec 05 12:46:56 crc kubenswrapper[4784]: I1205 12:46:56.797795 4784 generic.go:334] "Generic (PLEG): container finished" podID="dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc" 
containerID="766f9f440ff6ea0804d67f4f60e4fc74d51342cea217159a1984116133a23438" exitCode=143 Dec 05 12:46:56 crc kubenswrapper[4784]: I1205 12:46:56.797970 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5c8994b8f8-lr4cl" event={"ID":"dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc","Type":"ContainerDied","Data":"766f9f440ff6ea0804d67f4f60e4fc74d51342cea217159a1984116133a23438"} Dec 05 12:46:57 crc kubenswrapper[4784]: I1205 12:46:57.737236 4784 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5c8994b8f8-lr4cl" podUID="dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.181:9311/healthcheck\": dial tcp 10.217.0.181:9311: connect: connection refused" Dec 05 12:46:57 crc kubenswrapper[4784]: I1205 12:46:57.737338 4784 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5c8994b8f8-lr4cl" podUID="dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.181:9311/healthcheck\": dial tcp 10.217.0.181:9311: connect: connection refused" Dec 05 12:46:57 crc kubenswrapper[4784]: I1205 12:46:57.813382 4784 generic.go:334] "Generic (PLEG): container finished" podID="dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc" containerID="2ca89ad316719941d6e5e0487dc6ed12df2ec0958fec84a3e10e6e022e53d93e" exitCode=0 Dec 05 12:46:57 crc kubenswrapper[4784]: I1205 12:46:57.813424 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5c8994b8f8-lr4cl" event={"ID":"dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc","Type":"ContainerDied","Data":"2ca89ad316719941d6e5e0487dc6ed12df2ec0958fec84a3e10e6e022e53d93e"} Dec 05 12:46:57 crc kubenswrapper[4784]: I1205 12:46:57.972835 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-59c9dd888d-55zdv" Dec 05 12:46:57 crc kubenswrapper[4784]: I1205 12:46:57.987623 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-59c9dd888d-55zdv" Dec 05 12:46:58 crc kubenswrapper[4784]: I1205 12:46:58.107611 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 05 12:46:58 crc kubenswrapper[4784]: I1205 12:46:58.122557 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5c8994b8f8-lr4cl" Dec 05 12:46:58 crc kubenswrapper[4784]: I1205 12:46:58.208864 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc-config-data-custom\") pod \"dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc\" (UID: \"dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc\") " Dec 05 12:46:58 crc kubenswrapper[4784]: I1205 12:46:58.209052 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vnrbb\" (UniqueName: \"kubernetes.io/projected/dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc-kube-api-access-vnrbb\") pod \"dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc\" (UID: \"dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc\") " Dec 05 12:46:58 crc kubenswrapper[4784]: I1205 12:46:58.209111 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc-logs\") pod \"dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc\" (UID: \"dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc\") " Dec 05 12:46:58 crc kubenswrapper[4784]: I1205 12:46:58.209166 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc-combined-ca-bundle\") pod \"dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc\" (UID: \"dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc\") " Dec 05 12:46:58 crc kubenswrapper[4784]: I1205 12:46:58.209258 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc-config-data\") pod \"dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc\" (UID: \"dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc\") " Dec 05 12:46:58 crc kubenswrapper[4784]: I1205 12:46:58.209664 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc-logs" (OuterVolumeSpecName: "logs") pod "dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc" (UID: "dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:46:58 crc kubenswrapper[4784]: I1205 12:46:58.217496 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc" (UID: "dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:46:58 crc kubenswrapper[4784]: I1205 12:46:58.229557 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc-kube-api-access-vnrbb" (OuterVolumeSpecName: "kube-api-access-vnrbb") pod "dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc" (UID: "dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc"). InnerVolumeSpecName "kube-api-access-vnrbb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:46:58 crc kubenswrapper[4784]: I1205 12:46:58.246595 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc" (UID: "dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:46:58 crc kubenswrapper[4784]: I1205 12:46:58.282295 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc-config-data" (OuterVolumeSpecName: "config-data") pod "dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc" (UID: "dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:46:58 crc kubenswrapper[4784]: I1205 12:46:58.311408 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vnrbb\" (UniqueName: \"kubernetes.io/projected/dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc-kube-api-access-vnrbb\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:58 crc kubenswrapper[4784]: I1205 12:46:58.311676 4784 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc-logs\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:58 crc kubenswrapper[4784]: I1205 12:46:58.311777 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:58 crc kubenswrapper[4784]: I1205 12:46:58.311878 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:58 crc kubenswrapper[4784]: I1205 12:46:58.311964 4784 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 12:46:58 crc kubenswrapper[4784]: I1205 12:46:58.432362 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 05 12:46:58 crc kubenswrapper[4784]: I1205 12:46:58.825059 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5c8994b8f8-lr4cl" event={"ID":"dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc","Type":"ContainerDied","Data":"291d54cfe16dfb9253d9340d4b313c66ead36f1719dc0bde37050362ae2bef44"} Dec 05 12:46:58 crc kubenswrapper[4784]: I1205 12:46:58.825107 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5c8994b8f8-lr4cl" Dec 05 12:46:58 crc kubenswrapper[4784]: I1205 12:46:58.825131 4784 scope.go:117] "RemoveContainer" containerID="2ca89ad316719941d6e5e0487dc6ed12df2ec0958fec84a3e10e6e022e53d93e" Dec 05 12:46:58 crc kubenswrapper[4784]: I1205 12:46:58.860363 4784 scope.go:117] "RemoveContainer" containerID="766f9f440ff6ea0804d67f4f60e4fc74d51342cea217159a1984116133a23438" Dec 05 12:46:58 crc kubenswrapper[4784]: I1205 12:46:58.871478 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5c8994b8f8-lr4cl"] Dec 05 12:46:58 crc kubenswrapper[4784]: I1205 12:46:58.882558 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-5c8994b8f8-lr4cl"] Dec 05 12:46:59 crc kubenswrapper[4784]: I1205 12:46:59.019316 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc" path="/var/lib/kubelet/pods/dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc/volumes" Dec 05 12:46:59 crc kubenswrapper[4784]: I1205 12:46:59.244717 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-6df6c9b849-hzswf" Dec 05 12:46:59 crc kubenswrapper[4784]: I1205 12:46:59.572646 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:46:59 crc kubenswrapper[4784]: I1205 12:46:59.572973 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.120177 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 05 12:47:00 crc kubenswrapper[4784]: E1205 12:47:00.120874 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc" containerName="barbican-api-log" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.120894 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc" containerName="barbican-api-log" Dec 05 12:47:00 crc kubenswrapper[4784]: E1205 12:47:00.120905 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc" containerName="barbican-api" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.120914 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc" containerName="barbican-api" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.121135 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc" containerName="barbican-api" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.121162 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="dddeb6f6-dea0-4ddc-8946-ae1e3fe478fc" containerName="barbican-api-log" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.121999 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.171316 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-6zzvd" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.171746 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.172391 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.198511 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.271973 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/34b29b6d-3608-4aaa-973f-f051fda9685f-openstack-config\") pod \"openstackclient\" (UID: \"34b29b6d-3608-4aaa-973f-f051fda9685f\") " pod="openstack/openstackclient" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.272094 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xll62\" (UniqueName: \"kubernetes.io/projected/34b29b6d-3608-4aaa-973f-f051fda9685f-kube-api-access-xll62\") pod \"openstackclient\" (UID: \"34b29b6d-3608-4aaa-973f-f051fda9685f\") " pod="openstack/openstackclient" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.272173 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34b29b6d-3608-4aaa-973f-f051fda9685f-combined-ca-bundle\") pod \"openstackclient\" (UID: \"34b29b6d-3608-4aaa-973f-f051fda9685f\") " pod="openstack/openstackclient" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.272268 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/34b29b6d-3608-4aaa-973f-f051fda9685f-openstack-config-secret\") pod \"openstackclient\" (UID: \"34b29b6d-3608-4aaa-973f-f051fda9685f\") " pod="openstack/openstackclient" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.373714 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xll62\" (UniqueName: \"kubernetes.io/projected/34b29b6d-3608-4aaa-973f-f051fda9685f-kube-api-access-xll62\") pod \"openstackclient\" (UID: \"34b29b6d-3608-4aaa-973f-f051fda9685f\") " pod="openstack/openstackclient" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.373793 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34b29b6d-3608-4aaa-973f-f051fda9685f-combined-ca-bundle\") pod \"openstackclient\" (UID: \"34b29b6d-3608-4aaa-973f-f051fda9685f\") " pod="openstack/openstackclient" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.373836 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/34b29b6d-3608-4aaa-973f-f051fda9685f-openstack-config-secret\") pod \"openstackclient\" (UID: \"34b29b6d-3608-4aaa-973f-f051fda9685f\") " pod="openstack/openstackclient" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.373953 4784 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/34b29b6d-3608-4aaa-973f-f051fda9685f-openstack-config\") pod \"openstackclient\" (UID: \"34b29b6d-3608-4aaa-973f-f051fda9685f\") " pod="openstack/openstackclient" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.375006 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/34b29b6d-3608-4aaa-973f-f051fda9685f-openstack-config\") pod \"openstackclient\" (UID: \"34b29b6d-3608-4aaa-973f-f051fda9685f\") " pod="openstack/openstackclient" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.381809 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/34b29b6d-3608-4aaa-973f-f051fda9685f-openstack-config-secret\") pod \"openstackclient\" (UID: \"34b29b6d-3608-4aaa-973f-f051fda9685f\") " pod="openstack/openstackclient" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.389856 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34b29b6d-3608-4aaa-973f-f051fda9685f-combined-ca-bundle\") pod \"openstackclient\" (UID: \"34b29b6d-3608-4aaa-973f-f051fda9685f\") " pod="openstack/openstackclient" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.394290 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xll62\" (UniqueName: \"kubernetes.io/projected/34b29b6d-3608-4aaa-973f-f051fda9685f-kube-api-access-xll62\") pod \"openstackclient\" (UID: \"34b29b6d-3608-4aaa-973f-f051fda9685f\") " pod="openstack/openstackclient" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.407282 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.408290 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.415783 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.435682 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.436900 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.456179 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.476290 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a90b38e9-d09e-4f72-9d73-85c2226e4049-openstack-config\") pod \"openstackclient\" (UID: \"a90b38e9-d09e-4f72-9d73-85c2226e4049\") " pod="openstack/openstackclient" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.476582 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vswz8\" (UniqueName: \"kubernetes.io/projected/a90b38e9-d09e-4f72-9d73-85c2226e4049-kube-api-access-vswz8\") pod \"openstackclient\" (UID: \"a90b38e9-d09e-4f72-9d73-85c2226e4049\") " pod="openstack/openstackclient" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.476607 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a90b38e9-d09e-4f72-9d73-85c2226e4049-openstack-config-secret\") pod \"openstackclient\" (UID: \"a90b38e9-d09e-4f72-9d73-85c2226e4049\") " pod="openstack/openstackclient" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.476705 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a90b38e9-d09e-4f72-9d73-85c2226e4049-combined-ca-bundle\") pod \"openstackclient\" (UID: \"a90b38e9-d09e-4f72-9d73-85c2226e4049\") " pod="openstack/openstackclient" Dec 05 12:47:00 crc kubenswrapper[4784]: E1205 12:47:00.536575 4784 log.go:32] "RunPodSandbox from runtime service failed" err=< Dec 05 12:47:00 crc kubenswrapper[4784]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_openstackclient_openstack_34b29b6d-3608-4aaa-973f-f051fda9685f_0(4541e6a6fc36915a6d741e25d976a30d721489285605c711d17ea3b611992cef): error adding pod openstack_openstackclient to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"4541e6a6fc36915a6d741e25d976a30d721489285605c711d17ea3b611992cef" Netns:"/var/run/netns/d56d5f05-c711-4cf9-bada-937b5c043b02" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openstack;K8S_POD_NAME=openstackclient;K8S_POD_INFRA_CONTAINER_ID=4541e6a6fc36915a6d741e25d976a30d721489285605c711d17ea3b611992cef;K8S_POD_UID=34b29b6d-3608-4aaa-973f-f051fda9685f" Path:"" ERRORED: error configuring pod [openstack/openstackclient] networking: Multus: [openstack/openstackclient/34b29b6d-3608-4aaa-973f-f051fda9685f]: expected pod UID "34b29b6d-3608-4aaa-973f-f051fda9685f" but got "a90b38e9-d09e-4f72-9d73-85c2226e4049" from Kube API Dec 05 12:47:00 crc kubenswrapper[4784]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 05 12:47:00 crc kubenswrapper[4784]: > Dec 05 12:47:00 crc kubenswrapper[4784]: E1205 12:47:00.536673 4784 kuberuntime_sandbox.go:72] 
"Failed to create sandbox for pod" err=< Dec 05 12:47:00 crc kubenswrapper[4784]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_openstackclient_openstack_34b29b6d-3608-4aaa-973f-f051fda9685f_0(4541e6a6fc36915a6d741e25d976a30d721489285605c711d17ea3b611992cef): error adding pod openstack_openstackclient to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"4541e6a6fc36915a6d741e25d976a30d721489285605c711d17ea3b611992cef" Netns:"/var/run/netns/d56d5f05-c711-4cf9-bada-937b5c043b02" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openstack;K8S_POD_NAME=openstackclient;K8S_POD_INFRA_CONTAINER_ID=4541e6a6fc36915a6d741e25d976a30d721489285605c711d17ea3b611992cef;K8S_POD_UID=34b29b6d-3608-4aaa-973f-f051fda9685f" Path:"" ERRORED: error configuring pod [openstack/openstackclient] networking: Multus: [openstack/openstackclient/34b29b6d-3608-4aaa-973f-f051fda9685f]: expected pod UID "34b29b6d-3608-4aaa-973f-f051fda9685f" but got "a90b38e9-d09e-4f72-9d73-85c2226e4049" from Kube API Dec 05 12:47:00 crc kubenswrapper[4784]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 05 12:47:00 crc kubenswrapper[4784]: > pod="openstack/openstackclient" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.579145 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a90b38e9-d09e-4f72-9d73-85c2226e4049-openstack-config\") pod \"openstackclient\" (UID: \"a90b38e9-d09e-4f72-9d73-85c2226e4049\") " pod="openstack/openstackclient" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.579221 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vswz8\" (UniqueName: \"kubernetes.io/projected/a90b38e9-d09e-4f72-9d73-85c2226e4049-kube-api-access-vswz8\") pod \"openstackclient\" (UID: \"a90b38e9-d09e-4f72-9d73-85c2226e4049\") " pod="openstack/openstackclient" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.579254 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a90b38e9-d09e-4f72-9d73-85c2226e4049-openstack-config-secret\") pod \"openstackclient\" (UID: \"a90b38e9-d09e-4f72-9d73-85c2226e4049\") " pod="openstack/openstackclient" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.579281 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a90b38e9-d09e-4f72-9d73-85c2226e4049-combined-ca-bundle\") pod \"openstackclient\" (UID: \"a90b38e9-d09e-4f72-9d73-85c2226e4049\") " pod="openstack/openstackclient" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.580866 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a90b38e9-d09e-4f72-9d73-85c2226e4049-openstack-config\") pod \"openstackclient\" (UID: \"a90b38e9-d09e-4f72-9d73-85c2226e4049\") " pod="openstack/openstackclient" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.583461 4784 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a90b38e9-d09e-4f72-9d73-85c2226e4049-openstack-config-secret\") pod \"openstackclient\" (UID: \"a90b38e9-d09e-4f72-9d73-85c2226e4049\") " pod="openstack/openstackclient" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.584330 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a90b38e9-d09e-4f72-9d73-85c2226e4049-combined-ca-bundle\") pod \"openstackclient\" (UID: \"a90b38e9-d09e-4f72-9d73-85c2226e4049\") " pod="openstack/openstackclient" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.595662 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vswz8\" (UniqueName: \"kubernetes.io/projected/a90b38e9-d09e-4f72-9d73-85c2226e4049-kube-api-access-vswz8\") pod \"openstackclient\" (UID: \"a90b38e9-d09e-4f72-9d73-85c2226e4049\") " pod="openstack/openstackclient" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.640996 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.846751 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.850094 4784 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="34b29b6d-3608-4aaa-973f-f051fda9685f" podUID="a90b38e9-d09e-4f72-9d73-85c2226e4049" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.861638 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.884938 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/34b29b6d-3608-4aaa-973f-f051fda9685f-openstack-config\") pod \"34b29b6d-3608-4aaa-973f-f051fda9685f\" (UID: \"34b29b6d-3608-4aaa-973f-f051fda9685f\") " Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.885549 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/34b29b6d-3608-4aaa-973f-f051fda9685f-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "34b29b6d-3608-4aaa-973f-f051fda9685f" (UID: "34b29b6d-3608-4aaa-973f-f051fda9685f"). InnerVolumeSpecName "openstack-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.885614 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xll62\" (UniqueName: \"kubernetes.io/projected/34b29b6d-3608-4aaa-973f-f051fda9685f-kube-api-access-xll62\") pod \"34b29b6d-3608-4aaa-973f-f051fda9685f\" (UID: \"34b29b6d-3608-4aaa-973f-f051fda9685f\") " Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.885701 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34b29b6d-3608-4aaa-973f-f051fda9685f-combined-ca-bundle\") pod \"34b29b6d-3608-4aaa-973f-f051fda9685f\" (UID: \"34b29b6d-3608-4aaa-973f-f051fda9685f\") " Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.885748 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/34b29b6d-3608-4aaa-973f-f051fda9685f-openstack-config-secret\") pod \"34b29b6d-3608-4aaa-973f-f051fda9685f\" (UID: \"34b29b6d-3608-4aaa-973f-f051fda9685f\") " Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.886278 4784 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/34b29b6d-3608-4aaa-973f-f051fda9685f-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.891411 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34b29b6d-3608-4aaa-973f-f051fda9685f-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "34b29b6d-3608-4aaa-973f-f051fda9685f" (UID: "34b29b6d-3608-4aaa-973f-f051fda9685f"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.891853 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34b29b6d-3608-4aaa-973f-f051fda9685f-kube-api-access-xll62" (OuterVolumeSpecName: "kube-api-access-xll62") pod "34b29b6d-3608-4aaa-973f-f051fda9685f" (UID: "34b29b6d-3608-4aaa-973f-f051fda9685f"). InnerVolumeSpecName "kube-api-access-xll62". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.892129 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34b29b6d-3608-4aaa-973f-f051fda9685f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "34b29b6d-3608-4aaa-973f-f051fda9685f" (UID: "34b29b6d-3608-4aaa-973f-f051fda9685f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.987781 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34b29b6d-3608-4aaa-973f-f051fda9685f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.987829 4784 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/34b29b6d-3608-4aaa-973f-f051fda9685f-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:00 crc kubenswrapper[4784]: I1205 12:47:00.987845 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xll62\" (UniqueName: \"kubernetes.io/projected/34b29b6d-3608-4aaa-973f-f051fda9685f-kube-api-access-xll62\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:01 crc kubenswrapper[4784]: I1205 12:47:01.021290 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="34b29b6d-3608-4aaa-973f-f051fda9685f" path="/var/lib/kubelet/pods/34b29b6d-3608-4aaa-973f-f051fda9685f/volumes" Dec 05 12:47:01 crc kubenswrapper[4784]: W1205 12:47:01.085559 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda90b38e9_d09e_4f72_9d73_85c2226e4049.slice/crio-bcae3c06ca97ee781031d6781edeeb3519464fb91d6fa0be0a8d24bf2b74821c WatchSource:0}: Error finding container bcae3c06ca97ee781031d6781edeeb3519464fb91d6fa0be0a8d24bf2b74821c: Status 404 returned error can't find the container with id bcae3c06ca97ee781031d6781edeeb3519464fb91d6fa0be0a8d24bf2b74821c Dec 05 12:47:01 crc kubenswrapper[4784]: I1205 12:47:01.087059 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 05 12:47:01 crc kubenswrapper[4784]: I1205 12:47:01.857848 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 05 12:47:01 crc kubenswrapper[4784]: I1205 12:47:01.857896 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"a90b38e9-d09e-4f72-9d73-85c2226e4049","Type":"ContainerStarted","Data":"bcae3c06ca97ee781031d6781edeeb3519464fb91d6fa0be0a8d24bf2b74821c"} Dec 05 12:47:01 crc kubenswrapper[4784]: I1205 12:47:01.870700 4784 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="34b29b6d-3608-4aaa-973f-f051fda9685f" podUID="a90b38e9-d09e-4f72-9d73-85c2226e4049" Dec 05 12:47:02 crc kubenswrapper[4784]: I1205 12:47:02.336501 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-decision-engine-0" Dec 05 12:47:02 crc kubenswrapper[4784]: I1205 12:47:02.336932 4784 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/watcher-decision-engine-0" Dec 05 12:47:02 crc kubenswrapper[4784]: I1205 12:47:02.337396 4784 scope.go:117] "RemoveContainer" containerID="1877b82f9ddf0bdc85406d9005c4d441a13f9f4235a6c129d7cfe7724ccfd4e3" Dec 05 12:47:02 crc kubenswrapper[4784]: I1205 12:47:02.875063 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"9ce21c3e-07a5-4404-827e-367acaba9d66","Type":"ContainerStarted","Data":"e39f25af97f8feb40ef61f3a8f1111a8e458c3f128fe65b6d33e77e3bfe59b7c"} Dec 05 12:47:03 crc kubenswrapper[4784]: I1205 12:47:03.302151 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 05 12:47:03 crc kubenswrapper[4784]: I1205 12:47:03.886979 4784 generic.go:334] "Generic (PLEG): container finished" podID="e78e5147-155e-4027-91ca-bf7e107f5b88" containerID="427abff3eac7473e838fa85f2097041270a6b80a6e6f065dfda211890bb77d19" exitCode=137 Dec 05 12:47:03 crc kubenswrapper[4784]: I1205 12:47:03.887067 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5884d57c44-l8tbz" event={"ID":"e78e5147-155e-4027-91ca-bf7e107f5b88","Type":"ContainerDied","Data":"427abff3eac7473e838fa85f2097041270a6b80a6e6f065dfda211890bb77d19"} Dec 05 12:47:03 crc kubenswrapper[4784]: I1205 12:47:03.887305 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5884d57c44-l8tbz" event={"ID":"e78e5147-155e-4027-91ca-bf7e107f5b88","Type":"ContainerDied","Data":"7c80f143aa686ba7b4ffc83f9555b71a70da036b98f4541df45777946174d544"} Dec 05 12:47:03 crc kubenswrapper[4784]: I1205 12:47:03.887327 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7c80f143aa686ba7b4ffc83f9555b71a70da036b98f4541df45777946174d544" Dec 05 12:47:03 crc kubenswrapper[4784]: I1205 12:47:03.991901 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5884d57c44-l8tbz" Dec 05 12:47:04 crc kubenswrapper[4784]: I1205 12:47:04.172177 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e78e5147-155e-4027-91ca-bf7e107f5b88-combined-ca-bundle\") pod \"e78e5147-155e-4027-91ca-bf7e107f5b88\" (UID: \"e78e5147-155e-4027-91ca-bf7e107f5b88\") " Dec 05 12:47:04 crc kubenswrapper[4784]: I1205 12:47:04.172249 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e78e5147-155e-4027-91ca-bf7e107f5b88-scripts\") pod \"e78e5147-155e-4027-91ca-bf7e107f5b88\" (UID: \"e78e5147-155e-4027-91ca-bf7e107f5b88\") " Dec 05 12:47:04 crc kubenswrapper[4784]: I1205 12:47:04.172311 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g4c48\" (UniqueName: \"kubernetes.io/projected/e78e5147-155e-4027-91ca-bf7e107f5b88-kube-api-access-g4c48\") pod \"e78e5147-155e-4027-91ca-bf7e107f5b88\" (UID: \"e78e5147-155e-4027-91ca-bf7e107f5b88\") " Dec 05 12:47:04 crc kubenswrapper[4784]: I1205 12:47:04.172476 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/e78e5147-155e-4027-91ca-bf7e107f5b88-horizon-tls-certs\") pod \"e78e5147-155e-4027-91ca-bf7e107f5b88\" (UID: \"e78e5147-155e-4027-91ca-bf7e107f5b88\") " Dec 05 12:47:04 crc kubenswrapper[4784]: I1205 12:47:04.172539 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e78e5147-155e-4027-91ca-bf7e107f5b88-config-data\") pod \"e78e5147-155e-4027-91ca-bf7e107f5b88\" (UID: \"e78e5147-155e-4027-91ca-bf7e107f5b88\") " Dec 05 12:47:04 crc kubenswrapper[4784]: I1205 12:47:04.172605 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e78e5147-155e-4027-91ca-bf7e107f5b88-logs\") pod \"e78e5147-155e-4027-91ca-bf7e107f5b88\" (UID: \"e78e5147-155e-4027-91ca-bf7e107f5b88\") " Dec 05 12:47:04 crc kubenswrapper[4784]: I1205 12:47:04.172640 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/e78e5147-155e-4027-91ca-bf7e107f5b88-horizon-secret-key\") pod \"e78e5147-155e-4027-91ca-bf7e107f5b88\" (UID: \"e78e5147-155e-4027-91ca-bf7e107f5b88\") " Dec 05 12:47:04 crc kubenswrapper[4784]: I1205 12:47:04.173158 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e78e5147-155e-4027-91ca-bf7e107f5b88-logs" (OuterVolumeSpecName: "logs") pod "e78e5147-155e-4027-91ca-bf7e107f5b88" (UID: "e78e5147-155e-4027-91ca-bf7e107f5b88"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:47:04 crc kubenswrapper[4784]: I1205 12:47:04.173757 4784 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e78e5147-155e-4027-91ca-bf7e107f5b88-logs\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:04 crc kubenswrapper[4784]: I1205 12:47:04.181215 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e78e5147-155e-4027-91ca-bf7e107f5b88-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "e78e5147-155e-4027-91ca-bf7e107f5b88" (UID: "e78e5147-155e-4027-91ca-bf7e107f5b88"). 
InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:47:04 crc kubenswrapper[4784]: I1205 12:47:04.205954 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e78e5147-155e-4027-91ca-bf7e107f5b88-scripts" (OuterVolumeSpecName: "scripts") pod "e78e5147-155e-4027-91ca-bf7e107f5b88" (UID: "e78e5147-155e-4027-91ca-bf7e107f5b88"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:47:04 crc kubenswrapper[4784]: I1205 12:47:04.206099 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e78e5147-155e-4027-91ca-bf7e107f5b88-kube-api-access-g4c48" (OuterVolumeSpecName: "kube-api-access-g4c48") pod "e78e5147-155e-4027-91ca-bf7e107f5b88" (UID: "e78e5147-155e-4027-91ca-bf7e107f5b88"). InnerVolumeSpecName "kube-api-access-g4c48". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:47:04 crc kubenswrapper[4784]: I1205 12:47:04.207584 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e78e5147-155e-4027-91ca-bf7e107f5b88-config-data" (OuterVolumeSpecName: "config-data") pod "e78e5147-155e-4027-91ca-bf7e107f5b88" (UID: "e78e5147-155e-4027-91ca-bf7e107f5b88"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:47:04 crc kubenswrapper[4784]: I1205 12:47:04.235151 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e78e5147-155e-4027-91ca-bf7e107f5b88-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e78e5147-155e-4027-91ca-bf7e107f5b88" (UID: "e78e5147-155e-4027-91ca-bf7e107f5b88"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:47:04 crc kubenswrapper[4784]: I1205 12:47:04.241693 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e78e5147-155e-4027-91ca-bf7e107f5b88-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "e78e5147-155e-4027-91ca-bf7e107f5b88" (UID: "e78e5147-155e-4027-91ca-bf7e107f5b88"). InnerVolumeSpecName "horizon-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:47:04 crc kubenswrapper[4784]: I1205 12:47:04.275069 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e78e5147-155e-4027-91ca-bf7e107f5b88-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:04 crc kubenswrapper[4784]: I1205 12:47:04.275104 4784 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/e78e5147-155e-4027-91ca-bf7e107f5b88-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:04 crc kubenswrapper[4784]: I1205 12:47:04.275116 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e78e5147-155e-4027-91ca-bf7e107f5b88-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:04 crc kubenswrapper[4784]: I1205 12:47:04.275125 4784 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e78e5147-155e-4027-91ca-bf7e107f5b88-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:04 crc kubenswrapper[4784]: I1205 12:47:04.275134 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g4c48\" (UniqueName: \"kubernetes.io/projected/e78e5147-155e-4027-91ca-bf7e107f5b88-kube-api-access-g4c48\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:04 crc kubenswrapper[4784]: I1205 12:47:04.275144 4784 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/e78e5147-155e-4027-91ca-bf7e107f5b88-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:04 crc kubenswrapper[4784]: I1205 12:47:04.898262 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5884d57c44-l8tbz" Dec 05 12:47:04 crc kubenswrapper[4784]: I1205 12:47:04.936226 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5884d57c44-l8tbz"] Dec 05 12:47:04 crc kubenswrapper[4784]: I1205 12:47:04.945542 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-5884d57c44-l8tbz"] Dec 05 12:47:05 crc kubenswrapper[4784]: I1205 12:47:05.027550 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e78e5147-155e-4027-91ca-bf7e107f5b88" path="/var/lib/kubelet/pods/e78e5147-155e-4027-91ca-bf7e107f5b88/volumes" Dec 05 12:47:05 crc kubenswrapper[4784]: I1205 12:47:05.883654 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-664869ddc-d4x9g"] Dec 05 12:47:05 crc kubenswrapper[4784]: E1205 12:47:05.884383 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e78e5147-155e-4027-91ca-bf7e107f5b88" containerName="horizon" Dec 05 12:47:05 crc kubenswrapper[4784]: I1205 12:47:05.884406 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="e78e5147-155e-4027-91ca-bf7e107f5b88" containerName="horizon" Dec 05 12:47:05 crc kubenswrapper[4784]: E1205 12:47:05.884449 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e78e5147-155e-4027-91ca-bf7e107f5b88" containerName="horizon-log" Dec 05 12:47:05 crc kubenswrapper[4784]: I1205 12:47:05.884457 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="e78e5147-155e-4027-91ca-bf7e107f5b88" containerName="horizon-log" Dec 05 12:47:05 crc kubenswrapper[4784]: I1205 12:47:05.884725 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="e78e5147-155e-4027-91ca-bf7e107f5b88" containerName="horizon" Dec 05 12:47:05 crc kubenswrapper[4784]: I1205 12:47:05.884751 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="e78e5147-155e-4027-91ca-bf7e107f5b88" containerName="horizon-log" Dec 05 12:47:05 crc kubenswrapper[4784]: I1205 12:47:05.885878 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-664869ddc-d4x9g" Dec 05 12:47:05 crc kubenswrapper[4784]: I1205 12:47:05.888865 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Dec 05 12:47:05 crc kubenswrapper[4784]: I1205 12:47:05.891347 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Dec 05 12:47:05 crc kubenswrapper[4784]: I1205 12:47:05.891943 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 05 12:47:05 crc kubenswrapper[4784]: I1205 12:47:05.901214 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-664869ddc-d4x9g"] Dec 05 12:47:06 crc kubenswrapper[4784]: I1205 12:47:06.009425 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbdfc62e-030d-47fb-bcd5-ea38da412eb6-combined-ca-bundle\") pod \"swift-proxy-664869ddc-d4x9g\" (UID: \"dbdfc62e-030d-47fb-bcd5-ea38da412eb6\") " pod="openstack/swift-proxy-664869ddc-d4x9g" Dec 05 12:47:06 crc kubenswrapper[4784]: I1205 12:47:06.009505 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbdfc62e-030d-47fb-bcd5-ea38da412eb6-public-tls-certs\") pod \"swift-proxy-664869ddc-d4x9g\" (UID: \"dbdfc62e-030d-47fb-bcd5-ea38da412eb6\") " pod="openstack/swift-proxy-664869ddc-d4x9g" Dec 05 12:47:06 crc kubenswrapper[4784]: I1205 12:47:06.009550 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dbdfc62e-030d-47fb-bcd5-ea38da412eb6-run-httpd\") pod \"swift-proxy-664869ddc-d4x9g\" (UID: \"dbdfc62e-030d-47fb-bcd5-ea38da412eb6\") " pod="openstack/swift-proxy-664869ddc-d4x9g" Dec 05 12:47:06 crc kubenswrapper[4784]: I1205 12:47:06.009607 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/dbdfc62e-030d-47fb-bcd5-ea38da412eb6-etc-swift\") pod \"swift-proxy-664869ddc-d4x9g\" (UID: \"dbdfc62e-030d-47fb-bcd5-ea38da412eb6\") " pod="openstack/swift-proxy-664869ddc-d4x9g" Dec 05 12:47:06 crc kubenswrapper[4784]: I1205 12:47:06.009630 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gn6vh\" (UniqueName: \"kubernetes.io/projected/dbdfc62e-030d-47fb-bcd5-ea38da412eb6-kube-api-access-gn6vh\") pod \"swift-proxy-664869ddc-d4x9g\" (UID: \"dbdfc62e-030d-47fb-bcd5-ea38da412eb6\") " pod="openstack/swift-proxy-664869ddc-d4x9g" Dec 05 12:47:06 crc kubenswrapper[4784]: I1205 12:47:06.009731 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dbdfc62e-030d-47fb-bcd5-ea38da412eb6-log-httpd\") pod \"swift-proxy-664869ddc-d4x9g\" (UID: \"dbdfc62e-030d-47fb-bcd5-ea38da412eb6\") " pod="openstack/swift-proxy-664869ddc-d4x9g" Dec 05 12:47:06 crc kubenswrapper[4784]: I1205 12:47:06.009790 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbdfc62e-030d-47fb-bcd5-ea38da412eb6-internal-tls-certs\") pod \"swift-proxy-664869ddc-d4x9g\" (UID: \"dbdfc62e-030d-47fb-bcd5-ea38da412eb6\") " 
pod="openstack/swift-proxy-664869ddc-d4x9g" Dec 05 12:47:06 crc kubenswrapper[4784]: I1205 12:47:06.009812 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dbdfc62e-030d-47fb-bcd5-ea38da412eb6-config-data\") pod \"swift-proxy-664869ddc-d4x9g\" (UID: \"dbdfc62e-030d-47fb-bcd5-ea38da412eb6\") " pod="openstack/swift-proxy-664869ddc-d4x9g" Dec 05 12:47:06 crc kubenswrapper[4784]: I1205 12:47:06.113409 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dbdfc62e-030d-47fb-bcd5-ea38da412eb6-log-httpd\") pod \"swift-proxy-664869ddc-d4x9g\" (UID: \"dbdfc62e-030d-47fb-bcd5-ea38da412eb6\") " pod="openstack/swift-proxy-664869ddc-d4x9g" Dec 05 12:47:06 crc kubenswrapper[4784]: I1205 12:47:06.113488 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbdfc62e-030d-47fb-bcd5-ea38da412eb6-internal-tls-certs\") pod \"swift-proxy-664869ddc-d4x9g\" (UID: \"dbdfc62e-030d-47fb-bcd5-ea38da412eb6\") " pod="openstack/swift-proxy-664869ddc-d4x9g" Dec 05 12:47:06 crc kubenswrapper[4784]: I1205 12:47:06.113511 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dbdfc62e-030d-47fb-bcd5-ea38da412eb6-config-data\") pod \"swift-proxy-664869ddc-d4x9g\" (UID: \"dbdfc62e-030d-47fb-bcd5-ea38da412eb6\") " pod="openstack/swift-proxy-664869ddc-d4x9g" Dec 05 12:47:06 crc kubenswrapper[4784]: I1205 12:47:06.113530 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbdfc62e-030d-47fb-bcd5-ea38da412eb6-combined-ca-bundle\") pod \"swift-proxy-664869ddc-d4x9g\" (UID: \"dbdfc62e-030d-47fb-bcd5-ea38da412eb6\") " pod="openstack/swift-proxy-664869ddc-d4x9g" Dec 05 12:47:06 crc kubenswrapper[4784]: I1205 12:47:06.113562 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbdfc62e-030d-47fb-bcd5-ea38da412eb6-public-tls-certs\") pod \"swift-proxy-664869ddc-d4x9g\" (UID: \"dbdfc62e-030d-47fb-bcd5-ea38da412eb6\") " pod="openstack/swift-proxy-664869ddc-d4x9g" Dec 05 12:47:06 crc kubenswrapper[4784]: I1205 12:47:06.113589 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dbdfc62e-030d-47fb-bcd5-ea38da412eb6-run-httpd\") pod \"swift-proxy-664869ddc-d4x9g\" (UID: \"dbdfc62e-030d-47fb-bcd5-ea38da412eb6\") " pod="openstack/swift-proxy-664869ddc-d4x9g" Dec 05 12:47:06 crc kubenswrapper[4784]: I1205 12:47:06.113628 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/dbdfc62e-030d-47fb-bcd5-ea38da412eb6-etc-swift\") pod \"swift-proxy-664869ddc-d4x9g\" (UID: \"dbdfc62e-030d-47fb-bcd5-ea38da412eb6\") " pod="openstack/swift-proxy-664869ddc-d4x9g" Dec 05 12:47:06 crc kubenswrapper[4784]: I1205 12:47:06.113646 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gn6vh\" (UniqueName: \"kubernetes.io/projected/dbdfc62e-030d-47fb-bcd5-ea38da412eb6-kube-api-access-gn6vh\") pod \"swift-proxy-664869ddc-d4x9g\" (UID: \"dbdfc62e-030d-47fb-bcd5-ea38da412eb6\") " pod="openstack/swift-proxy-664869ddc-d4x9g" Dec 05 12:47:06 crc 
kubenswrapper[4784]: I1205 12:47:06.114512 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dbdfc62e-030d-47fb-bcd5-ea38da412eb6-log-httpd\") pod \"swift-proxy-664869ddc-d4x9g\" (UID: \"dbdfc62e-030d-47fb-bcd5-ea38da412eb6\") " pod="openstack/swift-proxy-664869ddc-d4x9g" Dec 05 12:47:06 crc kubenswrapper[4784]: I1205 12:47:06.116092 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dbdfc62e-030d-47fb-bcd5-ea38da412eb6-run-httpd\") pod \"swift-proxy-664869ddc-d4x9g\" (UID: \"dbdfc62e-030d-47fb-bcd5-ea38da412eb6\") " pod="openstack/swift-proxy-664869ddc-d4x9g" Dec 05 12:47:06 crc kubenswrapper[4784]: I1205 12:47:06.120825 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/dbdfc62e-030d-47fb-bcd5-ea38da412eb6-etc-swift\") pod \"swift-proxy-664869ddc-d4x9g\" (UID: \"dbdfc62e-030d-47fb-bcd5-ea38da412eb6\") " pod="openstack/swift-proxy-664869ddc-d4x9g" Dec 05 12:47:06 crc kubenswrapper[4784]: I1205 12:47:06.123222 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbdfc62e-030d-47fb-bcd5-ea38da412eb6-public-tls-certs\") pod \"swift-proxy-664869ddc-d4x9g\" (UID: \"dbdfc62e-030d-47fb-bcd5-ea38da412eb6\") " pod="openstack/swift-proxy-664869ddc-d4x9g" Dec 05 12:47:06 crc kubenswrapper[4784]: I1205 12:47:06.123949 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbdfc62e-030d-47fb-bcd5-ea38da412eb6-combined-ca-bundle\") pod \"swift-proxy-664869ddc-d4x9g\" (UID: \"dbdfc62e-030d-47fb-bcd5-ea38da412eb6\") " pod="openstack/swift-proxy-664869ddc-d4x9g" Dec 05 12:47:06 crc kubenswrapper[4784]: I1205 12:47:06.128151 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbdfc62e-030d-47fb-bcd5-ea38da412eb6-internal-tls-certs\") pod \"swift-proxy-664869ddc-d4x9g\" (UID: \"dbdfc62e-030d-47fb-bcd5-ea38da412eb6\") " pod="openstack/swift-proxy-664869ddc-d4x9g" Dec 05 12:47:06 crc kubenswrapper[4784]: I1205 12:47:06.129686 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gn6vh\" (UniqueName: \"kubernetes.io/projected/dbdfc62e-030d-47fb-bcd5-ea38da412eb6-kube-api-access-gn6vh\") pod \"swift-proxy-664869ddc-d4x9g\" (UID: \"dbdfc62e-030d-47fb-bcd5-ea38da412eb6\") " pod="openstack/swift-proxy-664869ddc-d4x9g" Dec 05 12:47:06 crc kubenswrapper[4784]: I1205 12:47:06.135670 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dbdfc62e-030d-47fb-bcd5-ea38da412eb6-config-data\") pod \"swift-proxy-664869ddc-d4x9g\" (UID: \"dbdfc62e-030d-47fb-bcd5-ea38da412eb6\") " pod="openstack/swift-proxy-664869ddc-d4x9g" Dec 05 12:47:06 crc kubenswrapper[4784]: I1205 12:47:06.262269 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-664869ddc-d4x9g" Dec 05 12:47:06 crc kubenswrapper[4784]: I1205 12:47:06.456136 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:47:06 crc kubenswrapper[4784]: I1205 12:47:06.456812 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f62acf64-20e7-4e17-be2d-640a38de004f" containerName="ceilometer-central-agent" containerID="cri-o://a9a505cdd064b44dd8f1e428c74b53b8b832e0796eb7a184328a97a2536ad7a6" gracePeriod=30 Dec 05 12:47:06 crc kubenswrapper[4784]: I1205 12:47:06.457307 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f62acf64-20e7-4e17-be2d-640a38de004f" containerName="proxy-httpd" containerID="cri-o://1acdd12e0d5530729e4cc92bc7b9d0f3f0ac72d205f65877bdbe09482a1361bb" gracePeriod=30 Dec 05 12:47:06 crc kubenswrapper[4784]: I1205 12:47:06.457322 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f62acf64-20e7-4e17-be2d-640a38de004f" containerName="ceilometer-notification-agent" containerID="cri-o://10e21568116bb7d039c08e10eb44ff3863b9da7e71581feaf91d9e54cbe0426d" gracePeriod=30 Dec 05 12:47:06 crc kubenswrapper[4784]: I1205 12:47:06.457337 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f62acf64-20e7-4e17-be2d-640a38de004f" containerName="sg-core" containerID="cri-o://74ce7688adb1a066cff1d530118bbaf3c08402ad14706e908d713e3e74dabbf0" gracePeriod=30 Dec 05 12:47:06 crc kubenswrapper[4784]: I1205 12:47:06.477073 4784 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="f62acf64-20e7-4e17-be2d-640a38de004f" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.185:3000/\": EOF" Dec 05 12:47:06 crc kubenswrapper[4784]: I1205 12:47:06.931672 4784 generic.go:334] "Generic (PLEG): container finished" podID="f62acf64-20e7-4e17-be2d-640a38de004f" containerID="1acdd12e0d5530729e4cc92bc7b9d0f3f0ac72d205f65877bdbe09482a1361bb" exitCode=0 Dec 05 12:47:06 crc kubenswrapper[4784]: I1205 12:47:06.931705 4784 generic.go:334] "Generic (PLEG): container finished" podID="f62acf64-20e7-4e17-be2d-640a38de004f" containerID="74ce7688adb1a066cff1d530118bbaf3c08402ad14706e908d713e3e74dabbf0" exitCode=2 Dec 05 12:47:06 crc kubenswrapper[4784]: I1205 12:47:06.931746 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f62acf64-20e7-4e17-be2d-640a38de004f","Type":"ContainerDied","Data":"1acdd12e0d5530729e4cc92bc7b9d0f3f0ac72d205f65877bdbe09482a1361bb"} Dec 05 12:47:06 crc kubenswrapper[4784]: I1205 12:47:06.931785 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f62acf64-20e7-4e17-be2d-640a38de004f","Type":"ContainerDied","Data":"74ce7688adb1a066cff1d530118bbaf3c08402ad14706e908d713e3e74dabbf0"} Dec 05 12:47:06 crc kubenswrapper[4784]: I1205 12:47:06.934245 4784 generic.go:334] "Generic (PLEG): container finished" podID="9ce21c3e-07a5-4404-827e-367acaba9d66" containerID="e39f25af97f8feb40ef61f3a8f1111a8e458c3f128fe65b6d33e77e3bfe59b7c" exitCode=1 Dec 05 12:47:06 crc kubenswrapper[4784]: I1205 12:47:06.934286 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" 
event={"ID":"9ce21c3e-07a5-4404-827e-367acaba9d66","Type":"ContainerDied","Data":"e39f25af97f8feb40ef61f3a8f1111a8e458c3f128fe65b6d33e77e3bfe59b7c"} Dec 05 12:47:06 crc kubenswrapper[4784]: I1205 12:47:06.934318 4784 scope.go:117] "RemoveContainer" containerID="1877b82f9ddf0bdc85406d9005c4d441a13f9f4235a6c129d7cfe7724ccfd4e3" Dec 05 12:47:06 crc kubenswrapper[4784]: I1205 12:47:06.935078 4784 scope.go:117] "RemoveContainer" containerID="e39f25af97f8feb40ef61f3a8f1111a8e458c3f128fe65b6d33e77e3bfe59b7c" Dec 05 12:47:06 crc kubenswrapper[4784]: E1205 12:47:06.935376 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 20s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(9ce21c3e-07a5-4404-827e-367acaba9d66)\"" pod="openstack/watcher-decision-engine-0" podUID="9ce21c3e-07a5-4404-827e-367acaba9d66" Dec 05 12:47:07 crc kubenswrapper[4784]: I1205 12:47:07.947979 4784 generic.go:334] "Generic (PLEG): container finished" podID="f62acf64-20e7-4e17-be2d-640a38de004f" containerID="a9a505cdd064b44dd8f1e428c74b53b8b832e0796eb7a184328a97a2536ad7a6" exitCode=0 Dec 05 12:47:07 crc kubenswrapper[4784]: I1205 12:47:07.948045 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f62acf64-20e7-4e17-be2d-640a38de004f","Type":"ContainerDied","Data":"a9a505cdd064b44dd8f1e428c74b53b8b832e0796eb7a184328a97a2536ad7a6"} Dec 05 12:47:10 crc kubenswrapper[4784]: I1205 12:47:10.596043 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-664869ddc-d4x9g"] Dec 05 12:47:10 crc kubenswrapper[4784]: W1205 12:47:10.600138 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddbdfc62e_030d_47fb_bcd5_ea38da412eb6.slice/crio-2c97801263d9697e4b472d4f5bac5c8ae20bd2749ec8ec184fef5973d6326eb5 WatchSource:0}: Error finding container 2c97801263d9697e4b472d4f5bac5c8ae20bd2749ec8ec184fef5973d6326eb5: Status 404 returned error can't find the container with id 2c97801263d9697e4b472d4f5bac5c8ae20bd2749ec8ec184fef5973d6326eb5 Dec 05 12:47:10 crc kubenswrapper[4784]: I1205 12:47:10.982683 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"a90b38e9-d09e-4f72-9d73-85c2226e4049","Type":"ContainerStarted","Data":"e24f6a172dd89dbaafe83debfd5f4720cb016b01aed4605910312b86187c68e6"} Dec 05 12:47:10 crc kubenswrapper[4784]: I1205 12:47:10.984169 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-664869ddc-d4x9g" event={"ID":"dbdfc62e-030d-47fb-bcd5-ea38da412eb6","Type":"ContainerStarted","Data":"1ce867d0b2d6ca625071bf8bc2b9229195db29c1b65eb281fdfb5b9510fe5922"} Dec 05 12:47:10 crc kubenswrapper[4784]: I1205 12:47:10.984330 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-664869ddc-d4x9g" Dec 05 12:47:10 crc kubenswrapper[4784]: I1205 12:47:10.984434 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-664869ddc-d4x9g" event={"ID":"dbdfc62e-030d-47fb-bcd5-ea38da412eb6","Type":"ContainerStarted","Data":"4f7190b2ef3c57439c8ecaacb336cb60dfed196503ff2298e3ca0aa2b2fa05a2"} Dec 05 12:47:10 crc kubenswrapper[4784]: I1205 12:47:10.984519 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-664869ddc-d4x9g" 
event={"ID":"dbdfc62e-030d-47fb-bcd5-ea38da412eb6","Type":"ContainerStarted","Data":"2c97801263d9697e4b472d4f5bac5c8ae20bd2749ec8ec184fef5973d6326eb5"} Dec 05 12:47:10 crc kubenswrapper[4784]: I1205 12:47:10.984596 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-664869ddc-d4x9g" Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.012779 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.026987847 podStartE2EDuration="11.012759731s" podCreationTimestamp="2025-12-05 12:47:00 +0000 UTC" firstStartedPulling="2025-12-05 12:47:01.08761609 +0000 UTC m=+1300.507682905" lastFinishedPulling="2025-12-05 12:47:10.073387964 +0000 UTC m=+1309.493454789" observedRunningTime="2025-12-05 12:47:11.003688528 +0000 UTC m=+1310.423755393" watchObservedRunningTime="2025-12-05 12:47:11.012759731 +0000 UTC m=+1310.432826546" Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.046813 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-664869ddc-d4x9g" podStartSLOduration=6.046787913 podStartE2EDuration="6.046787913s" podCreationTimestamp="2025-12-05 12:47:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:47:11.037668698 +0000 UTC m=+1310.457735513" watchObservedRunningTime="2025-12-05 12:47:11.046787913 +0000 UTC m=+1310.466854728" Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.588657 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-7rms5"] Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.590051 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-7rms5" Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.613204 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-7rms5"] Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.617835 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.619242 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ln9g4\" (UniqueName: \"kubernetes.io/projected/d81b0108-f196-4724-90bb-c60348271f96-kube-api-access-ln9g4\") pod \"nova-api-db-create-7rms5\" (UID: \"d81b0108-f196-4724-90bb-c60348271f96\") " pod="openstack/nova-api-db-create-7rms5" Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.619351 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d81b0108-f196-4724-90bb-c60348271f96-operator-scripts\") pod \"nova-api-db-create-7rms5\" (UID: \"d81b0108-f196-4724-90bb-c60348271f96\") " pod="openstack/nova-api-db-create-7rms5" Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.685891 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-bjf5q"] Dec 05 12:47:11 crc kubenswrapper[4784]: E1205 12:47:11.686354 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f62acf64-20e7-4e17-be2d-640a38de004f" containerName="proxy-httpd" Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.686372 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="f62acf64-20e7-4e17-be2d-640a38de004f" containerName="proxy-httpd" Dec 05 12:47:11 crc kubenswrapper[4784]: E1205 12:47:11.686390 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f62acf64-20e7-4e17-be2d-640a38de004f" containerName="sg-core" Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.686397 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="f62acf64-20e7-4e17-be2d-640a38de004f" containerName="sg-core" Dec 05 12:47:11 crc kubenswrapper[4784]: E1205 12:47:11.686410 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f62acf64-20e7-4e17-be2d-640a38de004f" containerName="ceilometer-notification-agent" Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.686417 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="f62acf64-20e7-4e17-be2d-640a38de004f" containerName="ceilometer-notification-agent" Dec 05 12:47:11 crc kubenswrapper[4784]: E1205 12:47:11.686426 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f62acf64-20e7-4e17-be2d-640a38de004f" containerName="ceilometer-central-agent" Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.686432 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="f62acf64-20e7-4e17-be2d-640a38de004f" containerName="ceilometer-central-agent" Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.686619 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="f62acf64-20e7-4e17-be2d-640a38de004f" containerName="sg-core" Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.686642 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="f62acf64-20e7-4e17-be2d-640a38de004f" containerName="ceilometer-central-agent" Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.686660 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="f62acf64-20e7-4e17-be2d-640a38de004f" containerName="ceilometer-notification-agent" Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.686669 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="f62acf64-20e7-4e17-be2d-640a38de004f" containerName="proxy-httpd" Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.687325 4784 util.go:30] "No 
Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.702232 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-8da5-account-create-update-hblwl"]
Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.703788 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-8da5-account-create-update-hblwl"
Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.709762 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret"
Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.722939 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f62acf64-20e7-4e17-be2d-640a38de004f-sg-core-conf-yaml\") pod \"f62acf64-20e7-4e17-be2d-640a38de004f\" (UID: \"f62acf64-20e7-4e17-be2d-640a38de004f\") "
Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.723161 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f62acf64-20e7-4e17-be2d-640a38de004f-combined-ca-bundle\") pod \"f62acf64-20e7-4e17-be2d-640a38de004f\" (UID: \"f62acf64-20e7-4e17-be2d-640a38de004f\") "
Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.723414 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f62acf64-20e7-4e17-be2d-640a38de004f-log-httpd\") pod \"f62acf64-20e7-4e17-be2d-640a38de004f\" (UID: \"f62acf64-20e7-4e17-be2d-640a38de004f\") "
Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.723621 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f62acf64-20e7-4e17-be2d-640a38de004f-config-data\") pod \"f62acf64-20e7-4e17-be2d-640a38de004f\" (UID: \"f62acf64-20e7-4e17-be2d-640a38de004f\") "
Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.724248 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f62acf64-20e7-4e17-be2d-640a38de004f-scripts\") pod \"f62acf64-20e7-4e17-be2d-640a38de004f\" (UID: \"f62acf64-20e7-4e17-be2d-640a38de004f\") "
Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.724362 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f62acf64-20e7-4e17-be2d-640a38de004f-run-httpd\") pod \"f62acf64-20e7-4e17-be2d-640a38de004f\" (UID: \"f62acf64-20e7-4e17-be2d-640a38de004f\") "
Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.724528 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fr4r6\" (UniqueName: \"kubernetes.io/projected/f62acf64-20e7-4e17-be2d-640a38de004f-kube-api-access-fr4r6\") pod \"f62acf64-20e7-4e17-be2d-640a38de004f\" (UID: \"f62acf64-20e7-4e17-be2d-640a38de004f\") "
Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.729270 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f62acf64-20e7-4e17-be2d-640a38de004f-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "f62acf64-20e7-4e17-be2d-640a38de004f" (UID: "f62acf64-20e7-4e17-be2d-640a38de004f"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.729326 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f62acf64-20e7-4e17-be2d-640a38de004f-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "f62acf64-20e7-4e17-be2d-640a38de004f" (UID: "f62acf64-20e7-4e17-be2d-640a38de004f"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.730853 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ln9g4\" (UniqueName: \"kubernetes.io/projected/d81b0108-f196-4724-90bb-c60348271f96-kube-api-access-ln9g4\") pod \"nova-api-db-create-7rms5\" (UID: \"d81b0108-f196-4724-90bb-c60348271f96\") " pod="openstack/nova-api-db-create-7rms5"
Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.731015 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96d6e94d-da77-4211-8009-cec8d1ae70b4-operator-scripts\") pod \"nova-cell0-db-create-bjf5q\" (UID: \"96d6e94d-da77-4211-8009-cec8d1ae70b4\") " pod="openstack/nova-cell0-db-create-bjf5q"
Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.731142 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bpsdp\" (UniqueName: \"kubernetes.io/projected/96d6e94d-da77-4211-8009-cec8d1ae70b4-kube-api-access-bpsdp\") pod \"nova-cell0-db-create-bjf5q\" (UID: \"96d6e94d-da77-4211-8009-cec8d1ae70b4\") " pod="openstack/nova-cell0-db-create-bjf5q"
Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.731219 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d81b0108-f196-4724-90bb-c60348271f96-operator-scripts\") pod \"nova-api-db-create-7rms5\" (UID: \"d81b0108-f196-4724-90bb-c60348271f96\") " pod="openstack/nova-api-db-create-7rms5"
Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.742238 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f62acf64-20e7-4e17-be2d-640a38de004f-scripts" (OuterVolumeSpecName: "scripts") pod "f62acf64-20e7-4e17-be2d-640a38de004f" (UID: "f62acf64-20e7-4e17-be2d-640a38de004f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.753105 4784 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f62acf64-20e7-4e17-be2d-640a38de004f-log-httpd\") on node \"crc\" DevicePath \"\""
Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.753138 4784 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f62acf64-20e7-4e17-be2d-640a38de004f-run-httpd\") on node \"crc\" DevicePath \"\""
Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.758517 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-bjf5q"]
Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.773574 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d81b0108-f196-4724-90bb-c60348271f96-operator-scripts\") pod \"nova-api-db-create-7rms5\" (UID: \"d81b0108-f196-4724-90bb-c60348271f96\") " pod="openstack/nova-api-db-create-7rms5"
Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.783797 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ln9g4\" (UniqueName: \"kubernetes.io/projected/d81b0108-f196-4724-90bb-c60348271f96-kube-api-access-ln9g4\") pod \"nova-api-db-create-7rms5\" (UID: \"d81b0108-f196-4724-90bb-c60348271f96\") " pod="openstack/nova-api-db-create-7rms5"
Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.798169 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f62acf64-20e7-4e17-be2d-640a38de004f-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "f62acf64-20e7-4e17-be2d-640a38de004f" (UID: "f62acf64-20e7-4e17-be2d-640a38de004f"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.798599 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f62acf64-20e7-4e17-be2d-640a38de004f-kube-api-access-fr4r6" (OuterVolumeSpecName: "kube-api-access-fr4r6") pod "f62acf64-20e7-4e17-be2d-640a38de004f" (UID: "f62acf64-20e7-4e17-be2d-640a38de004f"). InnerVolumeSpecName "kube-api-access-fr4r6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.817444 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-8da5-account-create-update-hblwl"]
Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.857354 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n8gbk\" (UniqueName: \"kubernetes.io/projected/6a5de896-0d89-42c3-a59b-d07f226e76dc-kube-api-access-n8gbk\") pod \"nova-api-8da5-account-create-update-hblwl\" (UID: \"6a5de896-0d89-42c3-a59b-d07f226e76dc\") " pod="openstack/nova-api-8da5-account-create-update-hblwl"
Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.857428 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96d6e94d-da77-4211-8009-cec8d1ae70b4-operator-scripts\") pod \"nova-cell0-db-create-bjf5q\" (UID: \"96d6e94d-da77-4211-8009-cec8d1ae70b4\") " pod="openstack/nova-cell0-db-create-bjf5q"
Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.857496 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bpsdp\" (UniqueName: \"kubernetes.io/projected/96d6e94d-da77-4211-8009-cec8d1ae70b4-kube-api-access-bpsdp\") pod \"nova-cell0-db-create-bjf5q\" (UID: \"96d6e94d-da77-4211-8009-cec8d1ae70b4\") " pod="openstack/nova-cell0-db-create-bjf5q"
Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.857555 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6a5de896-0d89-42c3-a59b-d07f226e76dc-operator-scripts\") pod \"nova-api-8da5-account-create-update-hblwl\" (UID: \"6a5de896-0d89-42c3-a59b-d07f226e76dc\") " pod="openstack/nova-api-8da5-account-create-update-hblwl"
Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.857606 4784 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f62acf64-20e7-4e17-be2d-640a38de004f-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.857618 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fr4r6\" (UniqueName: \"kubernetes.io/projected/f62acf64-20e7-4e17-be2d-640a38de004f-kube-api-access-fr4r6\") on node \"crc\" DevicePath \"\""
Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.857627 4784 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f62acf64-20e7-4e17-be2d-640a38de004f-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.862946 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-n6b2l"]
Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.865599 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-n6b2l"
Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.873501 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96d6e94d-da77-4211-8009-cec8d1ae70b4-operator-scripts\") pod \"nova-cell0-db-create-bjf5q\" (UID: \"96d6e94d-da77-4211-8009-cec8d1ae70b4\") " pod="openstack/nova-cell0-db-create-bjf5q"
Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.882705 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-n6b2l"]
Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.887813 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bpsdp\" (UniqueName: \"kubernetes.io/projected/96d6e94d-da77-4211-8009-cec8d1ae70b4-kube-api-access-bpsdp\") pod \"nova-cell0-db-create-bjf5q\" (UID: \"96d6e94d-da77-4211-8009-cec8d1ae70b4\") " pod="openstack/nova-cell0-db-create-bjf5q"
Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.896100 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f62acf64-20e7-4e17-be2d-640a38de004f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f62acf64-20e7-4e17-be2d-640a38de004f" (UID: "f62acf64-20e7-4e17-be2d-640a38de004f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.896936 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-3537-account-create-update-gb6qr"]
Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.899409 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-3537-account-create-update-gb6qr"
Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.900999 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret"
Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.909277 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-3537-account-create-update-gb6qr"]
Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.919387 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f62acf64-20e7-4e17-be2d-640a38de004f-config-data" (OuterVolumeSpecName: "config-data") pod "f62acf64-20e7-4e17-be2d-640a38de004f" (UID: "f62acf64-20e7-4e17-be2d-640a38de004f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.928788 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-7rms5"
Need to start a new one" pod="openstack/nova-api-db-create-7rms5" Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.959426 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/124aed59-cf1a-4150-adf5-a055107f2834-operator-scripts\") pod \"nova-cell0-3537-account-create-update-gb6qr\" (UID: \"124aed59-cf1a-4150-adf5-a055107f2834\") " pod="openstack/nova-cell0-3537-account-create-update-gb6qr" Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.959483 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lkskn\" (UniqueName: \"kubernetes.io/projected/124aed59-cf1a-4150-adf5-a055107f2834-kube-api-access-lkskn\") pod \"nova-cell0-3537-account-create-update-gb6qr\" (UID: \"124aed59-cf1a-4150-adf5-a055107f2834\") " pod="openstack/nova-cell0-3537-account-create-update-gb6qr" Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.959578 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/64467fbc-3720-453a-af74-aee0374aaa3a-operator-scripts\") pod \"nova-cell1-db-create-n6b2l\" (UID: \"64467fbc-3720-453a-af74-aee0374aaa3a\") " pod="openstack/nova-cell1-db-create-n6b2l" Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.959607 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tbvvz\" (UniqueName: \"kubernetes.io/projected/64467fbc-3720-453a-af74-aee0374aaa3a-kube-api-access-tbvvz\") pod \"nova-cell1-db-create-n6b2l\" (UID: \"64467fbc-3720-453a-af74-aee0374aaa3a\") " pod="openstack/nova-cell1-db-create-n6b2l" Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.959638 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6a5de896-0d89-42c3-a59b-d07f226e76dc-operator-scripts\") pod \"nova-api-8da5-account-create-update-hblwl\" (UID: \"6a5de896-0d89-42c3-a59b-d07f226e76dc\") " pod="openstack/nova-api-8da5-account-create-update-hblwl" Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.959721 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n8gbk\" (UniqueName: \"kubernetes.io/projected/6a5de896-0d89-42c3-a59b-d07f226e76dc-kube-api-access-n8gbk\") pod \"nova-api-8da5-account-create-update-hblwl\" (UID: \"6a5de896-0d89-42c3-a59b-d07f226e76dc\") " pod="openstack/nova-api-8da5-account-create-update-hblwl" Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.959870 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f62acf64-20e7-4e17-be2d-640a38de004f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.959898 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f62acf64-20e7-4e17-be2d-640a38de004f-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.960651 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6a5de896-0d89-42c3-a59b-d07f226e76dc-operator-scripts\") pod \"nova-api-8da5-account-create-update-hblwl\" (UID: \"6a5de896-0d89-42c3-a59b-d07f226e76dc\") " 
pod="openstack/nova-api-8da5-account-create-update-hblwl" Dec 05 12:47:11 crc kubenswrapper[4784]: I1205 12:47:11.976650 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n8gbk\" (UniqueName: \"kubernetes.io/projected/6a5de896-0d89-42c3-a59b-d07f226e76dc-kube-api-access-n8gbk\") pod \"nova-api-8da5-account-create-update-hblwl\" (UID: \"6a5de896-0d89-42c3-a59b-d07f226e76dc\") " pod="openstack/nova-api-8da5-account-create-update-hblwl" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.005660 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-bjf5q" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.032195 4784 generic.go:334] "Generic (PLEG): container finished" podID="f62acf64-20e7-4e17-be2d-640a38de004f" containerID="10e21568116bb7d039c08e10eb44ff3863b9da7e71581feaf91d9e54cbe0426d" exitCode=0 Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.033332 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f62acf64-20e7-4e17-be2d-640a38de004f","Type":"ContainerDied","Data":"10e21568116bb7d039c08e10eb44ff3863b9da7e71581feaf91d9e54cbe0426d"} Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.033365 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.033394 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f62acf64-20e7-4e17-be2d-640a38de004f","Type":"ContainerDied","Data":"8e3594415ca105984f24daadd0efe4b5ee77eef5b1f71799640235995207d0c6"} Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.033415 4784 scope.go:117] "RemoveContainer" containerID="1acdd12e0d5530729e4cc92bc7b9d0f3f0ac72d205f65877bdbe09482a1361bb" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.038388 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-8da5-account-create-update-hblwl" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.061279 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/64467fbc-3720-453a-af74-aee0374aaa3a-operator-scripts\") pod \"nova-cell1-db-create-n6b2l\" (UID: \"64467fbc-3720-453a-af74-aee0374aaa3a\") " pod="openstack/nova-cell1-db-create-n6b2l" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.061309 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tbvvz\" (UniqueName: \"kubernetes.io/projected/64467fbc-3720-453a-af74-aee0374aaa3a-kube-api-access-tbvvz\") pod \"nova-cell1-db-create-n6b2l\" (UID: \"64467fbc-3720-453a-af74-aee0374aaa3a\") " pod="openstack/nova-cell1-db-create-n6b2l" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.061440 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lkskn\" (UniqueName: \"kubernetes.io/projected/124aed59-cf1a-4150-adf5-a055107f2834-kube-api-access-lkskn\") pod \"nova-cell0-3537-account-create-update-gb6qr\" (UID: \"124aed59-cf1a-4150-adf5-a055107f2834\") " pod="openstack/nova-cell0-3537-account-create-update-gb6qr" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.061459 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/124aed59-cf1a-4150-adf5-a055107f2834-operator-scripts\") pod \"nova-cell0-3537-account-create-update-gb6qr\" (UID: \"124aed59-cf1a-4150-adf5-a055107f2834\") " pod="openstack/nova-cell0-3537-account-create-update-gb6qr" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.063438 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/124aed59-cf1a-4150-adf5-a055107f2834-operator-scripts\") pod \"nova-cell0-3537-account-create-update-gb6qr\" (UID: \"124aed59-cf1a-4150-adf5-a055107f2834\") " pod="openstack/nova-cell0-3537-account-create-update-gb6qr" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.063900 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/64467fbc-3720-453a-af74-aee0374aaa3a-operator-scripts\") pod \"nova-cell1-db-create-n6b2l\" (UID: \"64467fbc-3720-453a-af74-aee0374aaa3a\") " pod="openstack/nova-cell1-db-create-n6b2l" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.093947 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tbvvz\" (UniqueName: \"kubernetes.io/projected/64467fbc-3720-453a-af74-aee0374aaa3a-kube-api-access-tbvvz\") pod \"nova-cell1-db-create-n6b2l\" (UID: \"64467fbc-3720-453a-af74-aee0374aaa3a\") " pod="openstack/nova-cell1-db-create-n6b2l" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.105696 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lkskn\" (UniqueName: \"kubernetes.io/projected/124aed59-cf1a-4150-adf5-a055107f2834-kube-api-access-lkskn\") pod \"nova-cell0-3537-account-create-update-gb6qr\" (UID: \"124aed59-cf1a-4150-adf5-a055107f2834\") " pod="openstack/nova-cell0-3537-account-create-update-gb6qr" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.119341 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-7701-account-create-update-mcnx7"] Dec 05 12:47:12 crc 
kubenswrapper[4784]: I1205 12:47:12.130026 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-7701-account-create-update-mcnx7" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.133900 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.143807 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-7701-account-create-update-mcnx7"] Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.196826 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-n6b2l" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.221510 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-3537-account-create-update-gb6qr" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.242807 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.247816 4784 scope.go:117] "RemoveContainer" containerID="74ce7688adb1a066cff1d530118bbaf3c08402ad14706e908d713e3e74dabbf0" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.259511 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.266610 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mwcjp\" (UniqueName: \"kubernetes.io/projected/bb722f31-d720-4de5-9a0e-cdbb3af2a535-kube-api-access-mwcjp\") pod \"nova-cell1-7701-account-create-update-mcnx7\" (UID: \"bb722f31-d720-4de5-9a0e-cdbb3af2a535\") " pod="openstack/nova-cell1-7701-account-create-update-mcnx7" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.267601 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bb722f31-d720-4de5-9a0e-cdbb3af2a535-operator-scripts\") pod \"nova-cell1-7701-account-create-update-mcnx7\" (UID: \"bb722f31-d720-4de5-9a0e-cdbb3af2a535\") " pod="openstack/nova-cell1-7701-account-create-update-mcnx7" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.273789 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.276063 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.289657 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.300298 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.310418 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.312081 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.312168 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.312903 4784 scope.go:117] "RemoveContainer" containerID="e39f25af97f8feb40ef61f3a8f1111a8e458c3f128fe65b6d33e77e3bfe59b7c" Dec 05 12:47:12 crc kubenswrapper[4784]: E1205 12:47:12.313242 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 20s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(9ce21c3e-07a5-4404-827e-367acaba9d66)\"" pod="openstack/watcher-decision-engine-0" podUID="9ce21c3e-07a5-4404-827e-367acaba9d66" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.333579 4784 scope.go:117] "RemoveContainer" containerID="10e21568116bb7d039c08e10eb44ff3863b9da7e71581feaf91d9e54cbe0426d" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.369002 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a\") " pod="openstack/ceilometer-0" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.369209 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-22klm\" (UniqueName: \"kubernetes.io/projected/b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a-kube-api-access-22klm\") pod \"ceilometer-0\" (UID: \"b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a\") " pod="openstack/ceilometer-0" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.369230 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a\") " pod="openstack/ceilometer-0" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.369255 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a-log-httpd\") pod \"ceilometer-0\" (UID: \"b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a\") " pod="openstack/ceilometer-0" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.369314 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a-run-httpd\") pod \"ceilometer-0\" (UID: \"b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a\") " 
pod="openstack/ceilometer-0" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.369377 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mwcjp\" (UniqueName: \"kubernetes.io/projected/bb722f31-d720-4de5-9a0e-cdbb3af2a535-kube-api-access-mwcjp\") pod \"nova-cell1-7701-account-create-update-mcnx7\" (UID: \"bb722f31-d720-4de5-9a0e-cdbb3af2a535\") " pod="openstack/nova-cell1-7701-account-create-update-mcnx7" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.369439 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a-scripts\") pod \"ceilometer-0\" (UID: \"b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a\") " pod="openstack/ceilometer-0" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.369459 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bb722f31-d720-4de5-9a0e-cdbb3af2a535-operator-scripts\") pod \"nova-cell1-7701-account-create-update-mcnx7\" (UID: \"bb722f31-d720-4de5-9a0e-cdbb3af2a535\") " pod="openstack/nova-cell1-7701-account-create-update-mcnx7" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.369481 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a-config-data\") pod \"ceilometer-0\" (UID: \"b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a\") " pod="openstack/ceilometer-0" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.371438 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bb722f31-d720-4de5-9a0e-cdbb3af2a535-operator-scripts\") pod \"nova-cell1-7701-account-create-update-mcnx7\" (UID: \"bb722f31-d720-4de5-9a0e-cdbb3af2a535\") " pod="openstack/nova-cell1-7701-account-create-update-mcnx7" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.381137 4784 scope.go:117] "RemoveContainer" containerID="a9a505cdd064b44dd8f1e428c74b53b8b832e0796eb7a184328a97a2536ad7a6" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.391330 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mwcjp\" (UniqueName: \"kubernetes.io/projected/bb722f31-d720-4de5-9a0e-cdbb3af2a535-kube-api-access-mwcjp\") pod \"nova-cell1-7701-account-create-update-mcnx7\" (UID: \"bb722f31-d720-4de5-9a0e-cdbb3af2a535\") " pod="openstack/nova-cell1-7701-account-create-update-mcnx7" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.428931 4784 scope.go:117] "RemoveContainer" containerID="1acdd12e0d5530729e4cc92bc7b9d0f3f0ac72d205f65877bdbe09482a1361bb" Dec 05 12:47:12 crc kubenswrapper[4784]: E1205 12:47:12.432389 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1acdd12e0d5530729e4cc92bc7b9d0f3f0ac72d205f65877bdbe09482a1361bb\": container with ID starting with 1acdd12e0d5530729e4cc92bc7b9d0f3f0ac72d205f65877bdbe09482a1361bb not found: ID does not exist" containerID="1acdd12e0d5530729e4cc92bc7b9d0f3f0ac72d205f65877bdbe09482a1361bb" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.432431 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1acdd12e0d5530729e4cc92bc7b9d0f3f0ac72d205f65877bdbe09482a1361bb"} err="failed to get container status 
\"1acdd12e0d5530729e4cc92bc7b9d0f3f0ac72d205f65877bdbe09482a1361bb\": rpc error: code = NotFound desc = could not find container \"1acdd12e0d5530729e4cc92bc7b9d0f3f0ac72d205f65877bdbe09482a1361bb\": container with ID starting with 1acdd12e0d5530729e4cc92bc7b9d0f3f0ac72d205f65877bdbe09482a1361bb not found: ID does not exist" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.432457 4784 scope.go:117] "RemoveContainer" containerID="74ce7688adb1a066cff1d530118bbaf3c08402ad14706e908d713e3e74dabbf0" Dec 05 12:47:12 crc kubenswrapper[4784]: E1205 12:47:12.432818 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"74ce7688adb1a066cff1d530118bbaf3c08402ad14706e908d713e3e74dabbf0\": container with ID starting with 74ce7688adb1a066cff1d530118bbaf3c08402ad14706e908d713e3e74dabbf0 not found: ID does not exist" containerID="74ce7688adb1a066cff1d530118bbaf3c08402ad14706e908d713e3e74dabbf0" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.432861 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"74ce7688adb1a066cff1d530118bbaf3c08402ad14706e908d713e3e74dabbf0"} err="failed to get container status \"74ce7688adb1a066cff1d530118bbaf3c08402ad14706e908d713e3e74dabbf0\": rpc error: code = NotFound desc = could not find container \"74ce7688adb1a066cff1d530118bbaf3c08402ad14706e908d713e3e74dabbf0\": container with ID starting with 74ce7688adb1a066cff1d530118bbaf3c08402ad14706e908d713e3e74dabbf0 not found: ID does not exist" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.432887 4784 scope.go:117] "RemoveContainer" containerID="10e21568116bb7d039c08e10eb44ff3863b9da7e71581feaf91d9e54cbe0426d" Dec 05 12:47:12 crc kubenswrapper[4784]: E1205 12:47:12.433200 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"10e21568116bb7d039c08e10eb44ff3863b9da7e71581feaf91d9e54cbe0426d\": container with ID starting with 10e21568116bb7d039c08e10eb44ff3863b9da7e71581feaf91d9e54cbe0426d not found: ID does not exist" containerID="10e21568116bb7d039c08e10eb44ff3863b9da7e71581feaf91d9e54cbe0426d" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.433228 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10e21568116bb7d039c08e10eb44ff3863b9da7e71581feaf91d9e54cbe0426d"} err="failed to get container status \"10e21568116bb7d039c08e10eb44ff3863b9da7e71581feaf91d9e54cbe0426d\": rpc error: code = NotFound desc = could not find container \"10e21568116bb7d039c08e10eb44ff3863b9da7e71581feaf91d9e54cbe0426d\": container with ID starting with 10e21568116bb7d039c08e10eb44ff3863b9da7e71581feaf91d9e54cbe0426d not found: ID does not exist" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.433241 4784 scope.go:117] "RemoveContainer" containerID="a9a505cdd064b44dd8f1e428c74b53b8b832e0796eb7a184328a97a2536ad7a6" Dec 05 12:47:12 crc kubenswrapper[4784]: E1205 12:47:12.433746 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a9a505cdd064b44dd8f1e428c74b53b8b832e0796eb7a184328a97a2536ad7a6\": container with ID starting with a9a505cdd064b44dd8f1e428c74b53b8b832e0796eb7a184328a97a2536ad7a6 not found: ID does not exist" containerID="a9a505cdd064b44dd8f1e428c74b53b8b832e0796eb7a184328a97a2536ad7a6" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.433767 4784 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a9a505cdd064b44dd8f1e428c74b53b8b832e0796eb7a184328a97a2536ad7a6"} err="failed to get container status \"a9a505cdd064b44dd8f1e428c74b53b8b832e0796eb7a184328a97a2536ad7a6\": rpc error: code = NotFound desc = could not find container \"a9a505cdd064b44dd8f1e428c74b53b8b832e0796eb7a184328a97a2536ad7a6\": container with ID starting with a9a505cdd064b44dd8f1e428c74b53b8b832e0796eb7a184328a97a2536ad7a6 not found: ID does not exist" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.470952 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a-scripts\") pod \"ceilometer-0\" (UID: \"b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a\") " pod="openstack/ceilometer-0" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.470999 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a-config-data\") pod \"ceilometer-0\" (UID: \"b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a\") " pod="openstack/ceilometer-0" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.471047 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a\") " pod="openstack/ceilometer-0" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.471136 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-22klm\" (UniqueName: \"kubernetes.io/projected/b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a-kube-api-access-22klm\") pod \"ceilometer-0\" (UID: \"b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a\") " pod="openstack/ceilometer-0" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.471157 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a\") " pod="openstack/ceilometer-0" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.471181 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a-log-httpd\") pod \"ceilometer-0\" (UID: \"b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a\") " pod="openstack/ceilometer-0" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.471214 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a-run-httpd\") pod \"ceilometer-0\" (UID: \"b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a\") " pod="openstack/ceilometer-0" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.471570 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a-run-httpd\") pod \"ceilometer-0\" (UID: \"b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a\") " pod="openstack/ceilometer-0" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.471784 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a-log-httpd\") pod 
\"ceilometer-0\" (UID: \"b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a\") " pod="openstack/ceilometer-0" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.479241 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a\") " pod="openstack/ceilometer-0" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.479635 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a-config-data\") pod \"ceilometer-0\" (UID: \"b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a\") " pod="openstack/ceilometer-0" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.479928 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a-scripts\") pod \"ceilometer-0\" (UID: \"b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a\") " pod="openstack/ceilometer-0" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.482587 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a\") " pod="openstack/ceilometer-0" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.487479 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-22klm\" (UniqueName: \"kubernetes.io/projected/b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a-kube-api-access-22klm\") pod \"ceilometer-0\" (UID: \"b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a\") " pod="openstack/ceilometer-0" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.499392 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-7rms5"] Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.530655 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-7701-account-create-update-mcnx7" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.662946 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.665176 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-8da5-account-create-update-hblwl"] Dec 05 12:47:12 crc kubenswrapper[4784]: W1205 12:47:12.703838 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6a5de896_0d89_42c3_a59b_d07f226e76dc.slice/crio-61391fe55f25fcc02bb52ba3aca1873a4dd19c928f96356eeea0cade7bbb76b6 WatchSource:0}: Error finding container 61391fe55f25fcc02bb52ba3aca1873a4dd19c928f96356eeea0cade7bbb76b6: Status 404 returned error can't find the container with id 61391fe55f25fcc02bb52ba3aca1873a4dd19c928f96356eeea0cade7bbb76b6 Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.771067 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-bjf5q"] Dec 05 12:47:12 crc kubenswrapper[4784]: W1205 12:47:12.798875 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod96d6e94d_da77_4211_8009_cec8d1ae70b4.slice/crio-7bce7e74416ebe8d8ae2d7042b4da4b4912bbb8218abf3e4b94a04baccd758ef WatchSource:0}: Error finding container 7bce7e74416ebe8d8ae2d7042b4da4b4912bbb8218abf3e4b94a04baccd758ef: Status 404 returned error can't find the container with id 7bce7e74416ebe8d8ae2d7042b4da4b4912bbb8218abf3e4b94a04baccd758ef Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.896757 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-n6b2l"] Dec 05 12:47:12 crc kubenswrapper[4784]: I1205 12:47:12.904182 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-3537-account-create-update-gb6qr"] Dec 05 12:47:12 crc kubenswrapper[4784]: W1205 12:47:12.911456 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod64467fbc_3720_453a_af74_aee0374aaa3a.slice/crio-0cce0e81c8eb1bca6dbb3d19e6790437779299761a583a68f37485b92838ba89 WatchSource:0}: Error finding container 0cce0e81c8eb1bca6dbb3d19e6790437779299761a583a68f37485b92838ba89: Status 404 returned error can't find the container with id 0cce0e81c8eb1bca6dbb3d19e6790437779299761a583a68f37485b92838ba89 Dec 05 12:47:13 crc kubenswrapper[4784]: I1205 12:47:13.011741 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f62acf64-20e7-4e17-be2d-640a38de004f" path="/var/lib/kubelet/pods/f62acf64-20e7-4e17-be2d-640a38de004f/volumes" Dec 05 12:47:13 crc kubenswrapper[4784]: I1205 12:47:13.103925 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-n6b2l" event={"ID":"64467fbc-3720-453a-af74-aee0374aaa3a","Type":"ContainerStarted","Data":"0cce0e81c8eb1bca6dbb3d19e6790437779299761a583a68f37485b92838ba89"} Dec 05 12:47:13 crc kubenswrapper[4784]: I1205 12:47:13.109315 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-3537-account-create-update-gb6qr" event={"ID":"124aed59-cf1a-4150-adf5-a055107f2834","Type":"ContainerStarted","Data":"17e2bdc6fefaa215b90cba6cb5219405da7a75d1a4b4046eb1c0f28e926759af"} Dec 05 12:47:13 crc kubenswrapper[4784]: I1205 12:47:13.110429 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-7rms5" 
event={"ID":"d81b0108-f196-4724-90bb-c60348271f96","Type":"ContainerStarted","Data":"69b9d3a278579160fd9883a122e79c24ea4a2bd12016697befcfbf4c2c7efeda"} Dec 05 12:47:13 crc kubenswrapper[4784]: I1205 12:47:13.110453 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-7rms5" event={"ID":"d81b0108-f196-4724-90bb-c60348271f96","Type":"ContainerStarted","Data":"dc8543ec6740424ec9f3803cf10843f7376f8d751aaca4fa2ad4c5377333bfe9"} Dec 05 12:47:13 crc kubenswrapper[4784]: I1205 12:47:13.124176 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-bjf5q" event={"ID":"96d6e94d-da77-4211-8009-cec8d1ae70b4","Type":"ContainerStarted","Data":"7bce7e74416ebe8d8ae2d7042b4da4b4912bbb8218abf3e4b94a04baccd758ef"} Dec 05 12:47:13 crc kubenswrapper[4784]: I1205 12:47:13.127404 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-7701-account-create-update-mcnx7"] Dec 05 12:47:13 crc kubenswrapper[4784]: I1205 12:47:13.162142 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-8da5-account-create-update-hblwl" event={"ID":"6a5de896-0d89-42c3-a59b-d07f226e76dc","Type":"ContainerStarted","Data":"61391fe55f25fcc02bb52ba3aca1873a4dd19c928f96356eeea0cade7bbb76b6"} Dec 05 12:47:13 crc kubenswrapper[4784]: I1205 12:47:13.164968 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-db-create-7rms5" podStartSLOduration=2.164955075 podStartE2EDuration="2.164955075s" podCreationTimestamp="2025-12-05 12:47:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:47:13.135502807 +0000 UTC m=+1312.555569622" watchObservedRunningTime="2025-12-05 12:47:13.164955075 +0000 UTC m=+1312.585021890" Dec 05 12:47:13 crc kubenswrapper[4784]: I1205 12:47:13.171670 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-db-create-bjf5q" podStartSLOduration=2.171659174 podStartE2EDuration="2.171659174s" podCreationTimestamp="2025-12-05 12:47:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:47:13.151940109 +0000 UTC m=+1312.572006924" watchObservedRunningTime="2025-12-05 12:47:13.171659174 +0000 UTC m=+1312.591725989" Dec 05 12:47:13 crc kubenswrapper[4784]: I1205 12:47:13.196928 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-8da5-account-create-update-hblwl" podStartSLOduration=2.196908562 podStartE2EDuration="2.196908562s" podCreationTimestamp="2025-12-05 12:47:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:47:13.177820407 +0000 UTC m=+1312.597887222" watchObservedRunningTime="2025-12-05 12:47:13.196908562 +0000 UTC m=+1312.616975377" Dec 05 12:47:13 crc kubenswrapper[4784]: I1205 12:47:13.247624 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:47:13 crc kubenswrapper[4784]: W1205 12:47:13.251844 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb6e4d90c_41c9_47ea_9d4b_dfdd323fd97a.slice/crio-1cceb2977a103b0010ed3dcfa107b2de5ff3106954a64b15b57636fa3f37441d WatchSource:0}: Error finding container 
1cceb2977a103b0010ed3dcfa107b2de5ff3106954a64b15b57636fa3f37441d: Status 404 returned error can't find the container with id 1cceb2977a103b0010ed3dcfa107b2de5ff3106954a64b15b57636fa3f37441d Dec 05 12:47:14 crc kubenswrapper[4784]: I1205 12:47:14.170334 4784 generic.go:334] "Generic (PLEG): container finished" podID="d81b0108-f196-4724-90bb-c60348271f96" containerID="69b9d3a278579160fd9883a122e79c24ea4a2bd12016697befcfbf4c2c7efeda" exitCode=0 Dec 05 12:47:14 crc kubenswrapper[4784]: I1205 12:47:14.170447 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-7rms5" event={"ID":"d81b0108-f196-4724-90bb-c60348271f96","Type":"ContainerDied","Data":"69b9d3a278579160fd9883a122e79c24ea4a2bd12016697befcfbf4c2c7efeda"} Dec 05 12:47:14 crc kubenswrapper[4784]: I1205 12:47:14.174154 4784 generic.go:334] "Generic (PLEG): container finished" podID="96d6e94d-da77-4211-8009-cec8d1ae70b4" containerID="1213d3b0b9294f443d495b5c6d9c5553af7b9da7088f306c89099523e7cb4934" exitCode=0 Dec 05 12:47:14 crc kubenswrapper[4784]: I1205 12:47:14.174251 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-bjf5q" event={"ID":"96d6e94d-da77-4211-8009-cec8d1ae70b4","Type":"ContainerDied","Data":"1213d3b0b9294f443d495b5c6d9c5553af7b9da7088f306c89099523e7cb4934"} Dec 05 12:47:14 crc kubenswrapper[4784]: I1205 12:47:14.176514 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a","Type":"ContainerStarted","Data":"2a89169972b180f481da8f5c97aa06ae28c5c3ef0bbcc4e9f7c97c3de073c8d0"} Dec 05 12:47:14 crc kubenswrapper[4784]: I1205 12:47:14.176546 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a","Type":"ContainerStarted","Data":"db71b94d5262c5125beb4691a9db6daa8bdd39fdb8c7562ae7af96d05e171c57"} Dec 05 12:47:14 crc kubenswrapper[4784]: I1205 12:47:14.176555 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a","Type":"ContainerStarted","Data":"1cceb2977a103b0010ed3dcfa107b2de5ff3106954a64b15b57636fa3f37441d"} Dec 05 12:47:14 crc kubenswrapper[4784]: I1205 12:47:14.178174 4784 generic.go:334] "Generic (PLEG): container finished" podID="bb722f31-d720-4de5-9a0e-cdbb3af2a535" containerID="aa78a190a91f98432378dd813c10b6fabd8064d364217c443227c576cce83f68" exitCode=0 Dec 05 12:47:14 crc kubenswrapper[4784]: I1205 12:47:14.178252 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-7701-account-create-update-mcnx7" event={"ID":"bb722f31-d720-4de5-9a0e-cdbb3af2a535","Type":"ContainerDied","Data":"aa78a190a91f98432378dd813c10b6fabd8064d364217c443227c576cce83f68"} Dec 05 12:47:14 crc kubenswrapper[4784]: I1205 12:47:14.178271 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-7701-account-create-update-mcnx7" event={"ID":"bb722f31-d720-4de5-9a0e-cdbb3af2a535","Type":"ContainerStarted","Data":"5098c6130ad8b461a119ff88de502259934f398d61f8d5c9bd73d7716039ece6"} Dec 05 12:47:14 crc kubenswrapper[4784]: I1205 12:47:14.183969 4784 generic.go:334] "Generic (PLEG): container finished" podID="6a5de896-0d89-42c3-a59b-d07f226e76dc" containerID="153b9c8c09bb1983c0bd0729b56ac8c11d53017d9e5d21afcf783bc4e7b3c359" exitCode=0 Dec 05 12:47:14 crc kubenswrapper[4784]: I1205 12:47:14.184123 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-api-8da5-account-create-update-hblwl" event={"ID":"6a5de896-0d89-42c3-a59b-d07f226e76dc","Type":"ContainerDied","Data":"153b9c8c09bb1983c0bd0729b56ac8c11d53017d9e5d21afcf783bc4e7b3c359"} Dec 05 12:47:14 crc kubenswrapper[4784]: I1205 12:47:14.186286 4784 generic.go:334] "Generic (PLEG): container finished" podID="64467fbc-3720-453a-af74-aee0374aaa3a" containerID="9ca5c5ea89f132cef3d5fd189b8aa7413fcde4f29888afc5734f2988548f38f5" exitCode=0 Dec 05 12:47:14 crc kubenswrapper[4784]: I1205 12:47:14.186348 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-n6b2l" event={"ID":"64467fbc-3720-453a-af74-aee0374aaa3a","Type":"ContainerDied","Data":"9ca5c5ea89f132cef3d5fd189b8aa7413fcde4f29888afc5734f2988548f38f5"} Dec 05 12:47:14 crc kubenswrapper[4784]: I1205 12:47:14.187976 4784 generic.go:334] "Generic (PLEG): container finished" podID="124aed59-cf1a-4150-adf5-a055107f2834" containerID="96a2703a1293e4e404b140d7cf2771dc71d0134eac8f0652208c1b8a2129a643" exitCode=0 Dec 05 12:47:14 crc kubenswrapper[4784]: I1205 12:47:14.188007 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-3537-account-create-update-gb6qr" event={"ID":"124aed59-cf1a-4150-adf5-a055107f2834","Type":"ContainerDied","Data":"96a2703a1293e4e404b140d7cf2771dc71d0134eac8f0652208c1b8a2129a643"} Dec 05 12:47:15 crc kubenswrapper[4784]: I1205 12:47:15.133040 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:47:15 crc kubenswrapper[4784]: I1205 12:47:15.665669 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-7rms5" Dec 05 12:47:15 crc kubenswrapper[4784]: I1205 12:47:15.680236 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-7701-account-create-update-mcnx7" Dec 05 12:47:15 crc kubenswrapper[4784]: I1205 12:47:15.750262 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d81b0108-f196-4724-90bb-c60348271f96-operator-scripts\") pod \"d81b0108-f196-4724-90bb-c60348271f96\" (UID: \"d81b0108-f196-4724-90bb-c60348271f96\") " Dec 05 12:47:15 crc kubenswrapper[4784]: I1205 12:47:15.750320 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mwcjp\" (UniqueName: \"kubernetes.io/projected/bb722f31-d720-4de5-9a0e-cdbb3af2a535-kube-api-access-mwcjp\") pod \"bb722f31-d720-4de5-9a0e-cdbb3af2a535\" (UID: \"bb722f31-d720-4de5-9a0e-cdbb3af2a535\") " Dec 05 12:47:15 crc kubenswrapper[4784]: I1205 12:47:15.750423 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ln9g4\" (UniqueName: \"kubernetes.io/projected/d81b0108-f196-4724-90bb-c60348271f96-kube-api-access-ln9g4\") pod \"d81b0108-f196-4724-90bb-c60348271f96\" (UID: \"d81b0108-f196-4724-90bb-c60348271f96\") " Dec 05 12:47:15 crc kubenswrapper[4784]: I1205 12:47:15.750561 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bb722f31-d720-4de5-9a0e-cdbb3af2a535-operator-scripts\") pod \"bb722f31-d720-4de5-9a0e-cdbb3af2a535\" (UID: \"bb722f31-d720-4de5-9a0e-cdbb3af2a535\") " Dec 05 12:47:15 crc kubenswrapper[4784]: I1205 12:47:15.752988 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bb722f31-d720-4de5-9a0e-cdbb3af2a535-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "bb722f31-d720-4de5-9a0e-cdbb3af2a535" (UID: "bb722f31-d720-4de5-9a0e-cdbb3af2a535"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:47:15 crc kubenswrapper[4784]: I1205 12:47:15.761700 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d81b0108-f196-4724-90bb-c60348271f96-kube-api-access-ln9g4" (OuterVolumeSpecName: "kube-api-access-ln9g4") pod "d81b0108-f196-4724-90bb-c60348271f96" (UID: "d81b0108-f196-4724-90bb-c60348271f96"). InnerVolumeSpecName "kube-api-access-ln9g4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:47:15 crc kubenswrapper[4784]: I1205 12:47:15.762421 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb722f31-d720-4de5-9a0e-cdbb3af2a535-kube-api-access-mwcjp" (OuterVolumeSpecName: "kube-api-access-mwcjp") pod "bb722f31-d720-4de5-9a0e-cdbb3af2a535" (UID: "bb722f31-d720-4de5-9a0e-cdbb3af2a535"). InnerVolumeSpecName "kube-api-access-mwcjp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:47:15 crc kubenswrapper[4784]: I1205 12:47:15.790222 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d81b0108-f196-4724-90bb-c60348271f96-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d81b0108-f196-4724-90bb-c60348271f96" (UID: "d81b0108-f196-4724-90bb-c60348271f96"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:47:15 crc kubenswrapper[4784]: I1205 12:47:15.852501 4784 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d81b0108-f196-4724-90bb-c60348271f96-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:15 crc kubenswrapper[4784]: I1205 12:47:15.852531 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mwcjp\" (UniqueName: \"kubernetes.io/projected/bb722f31-d720-4de5-9a0e-cdbb3af2a535-kube-api-access-mwcjp\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:15 crc kubenswrapper[4784]: I1205 12:47:15.852542 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ln9g4\" (UniqueName: \"kubernetes.io/projected/d81b0108-f196-4724-90bb-c60348271f96-kube-api-access-ln9g4\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:15 crc kubenswrapper[4784]: I1205 12:47:15.852550 4784 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bb722f31-d720-4de5-9a0e-cdbb3af2a535-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:15 crc kubenswrapper[4784]: I1205 12:47:15.909793 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-bjf5q" Dec 05 12:47:15 crc kubenswrapper[4784]: I1205 12:47:15.917304 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-3537-account-create-update-gb6qr" Dec 05 12:47:15 crc kubenswrapper[4784]: I1205 12:47:15.928578 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-8da5-account-create-update-hblwl" Dec 05 12:47:15 crc kubenswrapper[4784]: I1205 12:47:15.978581 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-n6b2l" Dec 05 12:47:16 crc kubenswrapper[4784]: I1205 12:47:16.055934 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tbvvz\" (UniqueName: \"kubernetes.io/projected/64467fbc-3720-453a-af74-aee0374aaa3a-kube-api-access-tbvvz\") pod \"64467fbc-3720-453a-af74-aee0374aaa3a\" (UID: \"64467fbc-3720-453a-af74-aee0374aaa3a\") " Dec 05 12:47:16 crc kubenswrapper[4784]: I1205 12:47:16.055999 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/64467fbc-3720-453a-af74-aee0374aaa3a-operator-scripts\") pod \"64467fbc-3720-453a-af74-aee0374aaa3a\" (UID: \"64467fbc-3720-453a-af74-aee0374aaa3a\") " Dec 05 12:47:16 crc kubenswrapper[4784]: I1205 12:47:16.056106 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6a5de896-0d89-42c3-a59b-d07f226e76dc-operator-scripts\") pod \"6a5de896-0d89-42c3-a59b-d07f226e76dc\" (UID: \"6a5de896-0d89-42c3-a59b-d07f226e76dc\") " Dec 05 12:47:16 crc kubenswrapper[4784]: I1205 12:47:16.056149 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/124aed59-cf1a-4150-adf5-a055107f2834-operator-scripts\") pod \"124aed59-cf1a-4150-adf5-a055107f2834\" (UID: \"124aed59-cf1a-4150-adf5-a055107f2834\") " Dec 05 12:47:16 crc kubenswrapper[4784]: I1205 12:47:16.056235 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96d6e94d-da77-4211-8009-cec8d1ae70b4-operator-scripts\") pod \"96d6e94d-da77-4211-8009-cec8d1ae70b4\" (UID: \"96d6e94d-da77-4211-8009-cec8d1ae70b4\") " Dec 05 12:47:16 crc kubenswrapper[4784]: I1205 12:47:16.056253 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lkskn\" (UniqueName: \"kubernetes.io/projected/124aed59-cf1a-4150-adf5-a055107f2834-kube-api-access-lkskn\") pod \"124aed59-cf1a-4150-adf5-a055107f2834\" (UID: \"124aed59-cf1a-4150-adf5-a055107f2834\") " Dec 05 12:47:16 crc kubenswrapper[4784]: I1205 12:47:16.056322 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bpsdp\" (UniqueName: \"kubernetes.io/projected/96d6e94d-da77-4211-8009-cec8d1ae70b4-kube-api-access-bpsdp\") pod \"96d6e94d-da77-4211-8009-cec8d1ae70b4\" (UID: \"96d6e94d-da77-4211-8009-cec8d1ae70b4\") " Dec 05 12:47:16 crc kubenswrapper[4784]: I1205 12:47:16.056356 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n8gbk\" (UniqueName: \"kubernetes.io/projected/6a5de896-0d89-42c3-a59b-d07f226e76dc-kube-api-access-n8gbk\") pod \"6a5de896-0d89-42c3-a59b-d07f226e76dc\" (UID: \"6a5de896-0d89-42c3-a59b-d07f226e76dc\") " Dec 05 12:47:16 crc kubenswrapper[4784]: I1205 12:47:16.057672 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/124aed59-cf1a-4150-adf5-a055107f2834-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "124aed59-cf1a-4150-adf5-a055107f2834" (UID: "124aed59-cf1a-4150-adf5-a055107f2834"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:47:16 crc kubenswrapper[4784]: I1205 12:47:16.060741 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/96d6e94d-da77-4211-8009-cec8d1ae70b4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "96d6e94d-da77-4211-8009-cec8d1ae70b4" (UID: "96d6e94d-da77-4211-8009-cec8d1ae70b4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:47:16 crc kubenswrapper[4784]: I1205 12:47:16.060754 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64467fbc-3720-453a-af74-aee0374aaa3a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "64467fbc-3720-453a-af74-aee0374aaa3a" (UID: "64467fbc-3720-453a-af74-aee0374aaa3a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:47:16 crc kubenswrapper[4784]: I1205 12:47:16.060845 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6a5de896-0d89-42c3-a59b-d07f226e76dc-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6a5de896-0d89-42c3-a59b-d07f226e76dc" (UID: "6a5de896-0d89-42c3-a59b-d07f226e76dc"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:47:16 crc kubenswrapper[4784]: I1205 12:47:16.060991 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a5de896-0d89-42c3-a59b-d07f226e76dc-kube-api-access-n8gbk" (OuterVolumeSpecName: "kube-api-access-n8gbk") pod "6a5de896-0d89-42c3-a59b-d07f226e76dc" (UID: "6a5de896-0d89-42c3-a59b-d07f226e76dc"). InnerVolumeSpecName "kube-api-access-n8gbk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:47:16 crc kubenswrapper[4784]: I1205 12:47:16.061015 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64467fbc-3720-453a-af74-aee0374aaa3a-kube-api-access-tbvvz" (OuterVolumeSpecName: "kube-api-access-tbvvz") pod "64467fbc-3720-453a-af74-aee0374aaa3a" (UID: "64467fbc-3720-453a-af74-aee0374aaa3a"). InnerVolumeSpecName "kube-api-access-tbvvz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:47:16 crc kubenswrapper[4784]: I1205 12:47:16.063223 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/124aed59-cf1a-4150-adf5-a055107f2834-kube-api-access-lkskn" (OuterVolumeSpecName: "kube-api-access-lkskn") pod "124aed59-cf1a-4150-adf5-a055107f2834" (UID: "124aed59-cf1a-4150-adf5-a055107f2834"). InnerVolumeSpecName "kube-api-access-lkskn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:47:16 crc kubenswrapper[4784]: I1205 12:47:16.063592 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96d6e94d-da77-4211-8009-cec8d1ae70b4-kube-api-access-bpsdp" (OuterVolumeSpecName: "kube-api-access-bpsdp") pod "96d6e94d-da77-4211-8009-cec8d1ae70b4" (UID: "96d6e94d-da77-4211-8009-cec8d1ae70b4"). InnerVolumeSpecName "kube-api-access-bpsdp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:47:16 crc kubenswrapper[4784]: I1205 12:47:16.159514 4784 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/124aed59-cf1a-4150-adf5-a055107f2834-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:16 crc kubenswrapper[4784]: I1205 12:47:16.159594 4784 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96d6e94d-da77-4211-8009-cec8d1ae70b4-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:16 crc kubenswrapper[4784]: I1205 12:47:16.159608 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lkskn\" (UniqueName: \"kubernetes.io/projected/124aed59-cf1a-4150-adf5-a055107f2834-kube-api-access-lkskn\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:16 crc kubenswrapper[4784]: I1205 12:47:16.159624 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bpsdp\" (UniqueName: \"kubernetes.io/projected/96d6e94d-da77-4211-8009-cec8d1ae70b4-kube-api-access-bpsdp\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:16 crc kubenswrapper[4784]: I1205 12:47:16.159636 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n8gbk\" (UniqueName: \"kubernetes.io/projected/6a5de896-0d89-42c3-a59b-d07f226e76dc-kube-api-access-n8gbk\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:16 crc kubenswrapper[4784]: I1205 12:47:16.159647 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tbvvz\" (UniqueName: \"kubernetes.io/projected/64467fbc-3720-453a-af74-aee0374aaa3a-kube-api-access-tbvvz\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:16 crc kubenswrapper[4784]: I1205 12:47:16.159655 4784 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/64467fbc-3720-453a-af74-aee0374aaa3a-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:16 crc kubenswrapper[4784]: I1205 12:47:16.159664 4784 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6a5de896-0d89-42c3-a59b-d07f226e76dc-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:16 crc kubenswrapper[4784]: I1205 12:47:16.212642 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-7701-account-create-update-mcnx7" event={"ID":"bb722f31-d720-4de5-9a0e-cdbb3af2a535","Type":"ContainerDied","Data":"5098c6130ad8b461a119ff88de502259934f398d61f8d5c9bd73d7716039ece6"} Dec 05 12:47:16 crc kubenswrapper[4784]: I1205 12:47:16.212701 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5098c6130ad8b461a119ff88de502259934f398d61f8d5c9bd73d7716039ece6" Dec 05 12:47:16 crc kubenswrapper[4784]: I1205 12:47:16.212663 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-7701-account-create-update-mcnx7" Dec 05 12:47:16 crc kubenswrapper[4784]: I1205 12:47:16.221560 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-8da5-account-create-update-hblwl" event={"ID":"6a5de896-0d89-42c3-a59b-d07f226e76dc","Type":"ContainerDied","Data":"61391fe55f25fcc02bb52ba3aca1873a4dd19c928f96356eeea0cade7bbb76b6"} Dec 05 12:47:16 crc kubenswrapper[4784]: I1205 12:47:16.221599 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="61391fe55f25fcc02bb52ba3aca1873a4dd19c928f96356eeea0cade7bbb76b6" Dec 05 12:47:16 crc kubenswrapper[4784]: I1205 12:47:16.221600 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-8da5-account-create-update-hblwl" Dec 05 12:47:16 crc kubenswrapper[4784]: I1205 12:47:16.226008 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-n6b2l" event={"ID":"64467fbc-3720-453a-af74-aee0374aaa3a","Type":"ContainerDied","Data":"0cce0e81c8eb1bca6dbb3d19e6790437779299761a583a68f37485b92838ba89"} Dec 05 12:47:16 crc kubenswrapper[4784]: I1205 12:47:16.226050 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0cce0e81c8eb1bca6dbb3d19e6790437779299761a583a68f37485b92838ba89" Dec 05 12:47:16 crc kubenswrapper[4784]: I1205 12:47:16.226108 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-n6b2l" Dec 05 12:47:16 crc kubenswrapper[4784]: I1205 12:47:16.230738 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-3537-account-create-update-gb6qr" event={"ID":"124aed59-cf1a-4150-adf5-a055107f2834","Type":"ContainerDied","Data":"17e2bdc6fefaa215b90cba6cb5219405da7a75d1a4b4046eb1c0f28e926759af"} Dec 05 12:47:16 crc kubenswrapper[4784]: I1205 12:47:16.230777 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="17e2bdc6fefaa215b90cba6cb5219405da7a75d1a4b4046eb1c0f28e926759af" Dec 05 12:47:16 crc kubenswrapper[4784]: I1205 12:47:16.230837 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-3537-account-create-update-gb6qr" Dec 05 12:47:16 crc kubenswrapper[4784]: I1205 12:47:16.242979 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-7rms5" Dec 05 12:47:16 crc kubenswrapper[4784]: I1205 12:47:16.242991 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-7rms5" event={"ID":"d81b0108-f196-4724-90bb-c60348271f96","Type":"ContainerDied","Data":"dc8543ec6740424ec9f3803cf10843f7376f8d751aaca4fa2ad4c5377333bfe9"} Dec 05 12:47:16 crc kubenswrapper[4784]: I1205 12:47:16.243032 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dc8543ec6740424ec9f3803cf10843f7376f8d751aaca4fa2ad4c5377333bfe9" Dec 05 12:47:16 crc kubenswrapper[4784]: I1205 12:47:16.246051 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-bjf5q" event={"ID":"96d6e94d-da77-4211-8009-cec8d1ae70b4","Type":"ContainerDied","Data":"7bce7e74416ebe8d8ae2d7042b4da4b4912bbb8218abf3e4b94a04baccd758ef"} Dec 05 12:47:16 crc kubenswrapper[4784]: I1205 12:47:16.246077 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7bce7e74416ebe8d8ae2d7042b4da4b4912bbb8218abf3e4b94a04baccd758ef" Dec 05 12:47:16 crc kubenswrapper[4784]: I1205 12:47:16.246120 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-bjf5q" Dec 05 12:47:16 crc kubenswrapper[4784]: I1205 12:47:16.267825 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-664869ddc-d4x9g" Dec 05 12:47:16 crc kubenswrapper[4784]: I1205 12:47:16.274134 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-664869ddc-d4x9g" Dec 05 12:47:18 crc kubenswrapper[4784]: I1205 12:47:18.278390 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a","Type":"ContainerStarted","Data":"5fd48bf457e80858858a56106210db056bc8c128a2acc4a4203aaa761523354b"} Dec 05 12:47:20 crc kubenswrapper[4784]: I1205 12:47:20.302772 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a","Type":"ContainerStarted","Data":"6b0b59669c7fe1b85d646c7829f5bc79249a0a863f46a0aafdb0c060a663606d"} Dec 05 12:47:20 crc kubenswrapper[4784]: I1205 12:47:20.303359 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 12:47:20 crc kubenswrapper[4784]: I1205 12:47:20.302909 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a" containerName="ceilometer-central-agent" containerID="cri-o://db71b94d5262c5125beb4691a9db6daa8bdd39fdb8c7562ae7af96d05e171c57" gracePeriod=30 Dec 05 12:47:20 crc kubenswrapper[4784]: I1205 12:47:20.303405 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a" containerName="proxy-httpd" containerID="cri-o://6b0b59669c7fe1b85d646c7829f5bc79249a0a863f46a0aafdb0c060a663606d" gracePeriod=30 Dec 05 12:47:20 crc kubenswrapper[4784]: I1205 12:47:20.303504 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a" containerName="sg-core" containerID="cri-o://5fd48bf457e80858858a56106210db056bc8c128a2acc4a4203aaa761523354b" gracePeriod=30 Dec 05 12:47:20 crc kubenswrapper[4784]: I1205 12:47:20.303499 4784 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a" containerName="ceilometer-notification-agent" containerID="cri-o://2a89169972b180f481da8f5c97aa06ae28c5c3ef0bbcc4e9f7c97c3de073c8d0" gracePeriod=30 Dec 05 12:47:20 crc kubenswrapper[4784]: I1205 12:47:20.332853 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.9737414599999998 podStartE2EDuration="8.33283366s" podCreationTimestamp="2025-12-05 12:47:12 +0000 UTC" firstStartedPulling="2025-12-05 12:47:13.254728985 +0000 UTC m=+1312.674795800" lastFinishedPulling="2025-12-05 12:47:19.613821185 +0000 UTC m=+1319.033888000" observedRunningTime="2025-12-05 12:47:20.323247771 +0000 UTC m=+1319.743314606" watchObservedRunningTime="2025-12-05 12:47:20.33283366 +0000 UTC m=+1319.752900475" Dec 05 12:47:21 crc kubenswrapper[4784]: I1205 12:47:21.315181 4784 generic.go:334] "Generic (PLEG): container finished" podID="b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a" containerID="6b0b59669c7fe1b85d646c7829f5bc79249a0a863f46a0aafdb0c060a663606d" exitCode=0 Dec 05 12:47:21 crc kubenswrapper[4784]: I1205 12:47:21.315481 4784 generic.go:334] "Generic (PLEG): container finished" podID="b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a" containerID="5fd48bf457e80858858a56106210db056bc8c128a2acc4a4203aaa761523354b" exitCode=2 Dec 05 12:47:21 crc kubenswrapper[4784]: I1205 12:47:21.315494 4784 generic.go:334] "Generic (PLEG): container finished" podID="b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a" containerID="2a89169972b180f481da8f5c97aa06ae28c5c3ef0bbcc4e9f7c97c3de073c8d0" exitCode=0 Dec 05 12:47:21 crc kubenswrapper[4784]: I1205 12:47:21.315501 4784 generic.go:334] "Generic (PLEG): container finished" podID="b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a" containerID="db71b94d5262c5125beb4691a9db6daa8bdd39fdb8c7562ae7af96d05e171c57" exitCode=0 Dec 05 12:47:21 crc kubenswrapper[4784]: I1205 12:47:21.315232 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a","Type":"ContainerDied","Data":"6b0b59669c7fe1b85d646c7829f5bc79249a0a863f46a0aafdb0c060a663606d"} Dec 05 12:47:21 crc kubenswrapper[4784]: I1205 12:47:21.315531 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a","Type":"ContainerDied","Data":"5fd48bf457e80858858a56106210db056bc8c128a2acc4a4203aaa761523354b"} Dec 05 12:47:21 crc kubenswrapper[4784]: I1205 12:47:21.315543 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a","Type":"ContainerDied","Data":"2a89169972b180f481da8f5c97aa06ae28c5c3ef0bbcc4e9f7c97c3de073c8d0"} Dec 05 12:47:21 crc kubenswrapper[4784]: I1205 12:47:21.315552 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a","Type":"ContainerDied","Data":"db71b94d5262c5125beb4691a9db6daa8bdd39fdb8c7562ae7af96d05e171c57"} Dec 05 12:47:21 crc kubenswrapper[4784]: I1205 12:47:21.617792 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:47:21 crc kubenswrapper[4784]: I1205 12:47:21.799150 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a-combined-ca-bundle\") pod \"b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a\" (UID: \"b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a\") " Dec 05 12:47:21 crc kubenswrapper[4784]: I1205 12:47:21.799259 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a-run-httpd\") pod \"b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a\" (UID: \"b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a\") " Dec 05 12:47:21 crc kubenswrapper[4784]: I1205 12:47:21.799284 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a-sg-core-conf-yaml\") pod \"b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a\" (UID: \"b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a\") " Dec 05 12:47:21 crc kubenswrapper[4784]: I1205 12:47:21.799354 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a-scripts\") pod \"b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a\" (UID: \"b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a\") " Dec 05 12:47:21 crc kubenswrapper[4784]: I1205 12:47:21.799376 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-22klm\" (UniqueName: \"kubernetes.io/projected/b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a-kube-api-access-22klm\") pod \"b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a\" (UID: \"b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a\") " Dec 05 12:47:21 crc kubenswrapper[4784]: I1205 12:47:21.799418 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a-log-httpd\") pod \"b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a\" (UID: \"b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a\") " Dec 05 12:47:21 crc kubenswrapper[4784]: I1205 12:47:21.799451 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a-config-data\") pod \"b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a\" (UID: \"b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a\") " Dec 05 12:47:21 crc kubenswrapper[4784]: I1205 12:47:21.800379 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a" (UID: "b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:47:21 crc kubenswrapper[4784]: I1205 12:47:21.802997 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a" (UID: "b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:47:21 crc kubenswrapper[4784]: I1205 12:47:21.805985 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a-scripts" (OuterVolumeSpecName: "scripts") pod "b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a" (UID: "b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:47:21 crc kubenswrapper[4784]: I1205 12:47:21.825434 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a-kube-api-access-22klm" (OuterVolumeSpecName: "kube-api-access-22klm") pod "b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a" (UID: "b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a"). InnerVolumeSpecName "kube-api-access-22klm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:47:21 crc kubenswrapper[4784]: I1205 12:47:21.838462 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a" (UID: "b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:47:21 crc kubenswrapper[4784]: I1205 12:47:21.894531 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a" (UID: "b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:47:21 crc kubenswrapper[4784]: I1205 12:47:21.902358 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:21 crc kubenswrapper[4784]: I1205 12:47:21.902402 4784 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:21 crc kubenswrapper[4784]: I1205 12:47:21.902415 4784 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:21 crc kubenswrapper[4784]: I1205 12:47:21.902427 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-22klm\" (UniqueName: \"kubernetes.io/projected/b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a-kube-api-access-22klm\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:21 crc kubenswrapper[4784]: I1205 12:47:21.902442 4784 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:21 crc kubenswrapper[4784]: I1205 12:47:21.902453 4784 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:21 crc kubenswrapper[4784]: I1205 12:47:21.904142 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/secret/b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a-config-data" (OuterVolumeSpecName: "config-data") pod "b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a" (UID: "b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.003837 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.009106 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-glpkn"] Dec 05 12:47:22 crc kubenswrapper[4784]: E1205 12:47:22.009504 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a" containerName="sg-core" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.009519 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a" containerName="sg-core" Dec 05 12:47:22 crc kubenswrapper[4784]: E1205 12:47:22.009532 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a" containerName="ceilometer-notification-agent" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.009539 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a" containerName="ceilometer-notification-agent" Dec 05 12:47:22 crc kubenswrapper[4784]: E1205 12:47:22.009551 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="124aed59-cf1a-4150-adf5-a055107f2834" containerName="mariadb-account-create-update" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.009557 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="124aed59-cf1a-4150-adf5-a055107f2834" containerName="mariadb-account-create-update" Dec 05 12:47:22 crc kubenswrapper[4784]: E1205 12:47:22.009575 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a" containerName="ceilometer-central-agent" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.009580 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a" containerName="ceilometer-central-agent" Dec 05 12:47:22 crc kubenswrapper[4784]: E1205 12:47:22.009593 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a5de896-0d89-42c3-a59b-d07f226e76dc" containerName="mariadb-account-create-update" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.009599 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a5de896-0d89-42c3-a59b-d07f226e76dc" containerName="mariadb-account-create-update" Dec 05 12:47:22 crc kubenswrapper[4784]: E1205 12:47:22.009607 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96d6e94d-da77-4211-8009-cec8d1ae70b4" containerName="mariadb-database-create" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.009613 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="96d6e94d-da77-4211-8009-cec8d1ae70b4" containerName="mariadb-database-create" Dec 05 12:47:22 crc kubenswrapper[4784]: E1205 12:47:22.009622 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a" containerName="proxy-httpd" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.009628 4784 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a" containerName="proxy-httpd" Dec 05 12:47:22 crc kubenswrapper[4784]: E1205 12:47:22.009638 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb722f31-d720-4de5-9a0e-cdbb3af2a535" containerName="mariadb-account-create-update" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.009644 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb722f31-d720-4de5-9a0e-cdbb3af2a535" containerName="mariadb-account-create-update" Dec 05 12:47:22 crc kubenswrapper[4784]: E1205 12:47:22.009657 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d81b0108-f196-4724-90bb-c60348271f96" containerName="mariadb-database-create" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.009663 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="d81b0108-f196-4724-90bb-c60348271f96" containerName="mariadb-database-create" Dec 05 12:47:22 crc kubenswrapper[4784]: E1205 12:47:22.009689 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64467fbc-3720-453a-af74-aee0374aaa3a" containerName="mariadb-database-create" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.009715 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="64467fbc-3720-453a-af74-aee0374aaa3a" containerName="mariadb-database-create" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.009874 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a" containerName="sg-core" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.009883 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="124aed59-cf1a-4150-adf5-a055107f2834" containerName="mariadb-account-create-update" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.009892 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a" containerName="ceilometer-notification-agent" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.009901 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a" containerName="proxy-httpd" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.009913 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb722f31-d720-4de5-9a0e-cdbb3af2a535" containerName="mariadb-account-create-update" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.009923 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="d81b0108-f196-4724-90bb-c60348271f96" containerName="mariadb-database-create" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.009936 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a5de896-0d89-42c3-a59b-d07f226e76dc" containerName="mariadb-account-create-update" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.009956 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a" containerName="ceilometer-central-agent" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.009966 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="96d6e94d-da77-4211-8009-cec8d1ae70b4" containerName="mariadb-database-create" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.009973 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="64467fbc-3720-453a-af74-aee0374aaa3a" containerName="mariadb-database-create" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.010641 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-glpkn" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.012669 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-nx8fv" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.014806 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.015013 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.028504 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-glpkn"] Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.105349 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26e5617b-90b9-451b-a0a1-6f43d885ab38-config-data\") pod \"nova-cell0-conductor-db-sync-glpkn\" (UID: \"26e5617b-90b9-451b-a0a1-6f43d885ab38\") " pod="openstack/nova-cell0-conductor-db-sync-glpkn" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.105455 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/26e5617b-90b9-451b-a0a1-6f43d885ab38-scripts\") pod \"nova-cell0-conductor-db-sync-glpkn\" (UID: \"26e5617b-90b9-451b-a0a1-6f43d885ab38\") " pod="openstack/nova-cell0-conductor-db-sync-glpkn" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.105505 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26e5617b-90b9-451b-a0a1-6f43d885ab38-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-glpkn\" (UID: \"26e5617b-90b9-451b-a0a1-6f43d885ab38\") " pod="openstack/nova-cell0-conductor-db-sync-glpkn" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.105666 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqw4n\" (UniqueName: \"kubernetes.io/projected/26e5617b-90b9-451b-a0a1-6f43d885ab38-kube-api-access-fqw4n\") pod \"nova-cell0-conductor-db-sync-glpkn\" (UID: \"26e5617b-90b9-451b-a0a1-6f43d885ab38\") " pod="openstack/nova-cell0-conductor-db-sync-glpkn" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.207118 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26e5617b-90b9-451b-a0a1-6f43d885ab38-config-data\") pod \"nova-cell0-conductor-db-sync-glpkn\" (UID: \"26e5617b-90b9-451b-a0a1-6f43d885ab38\") " pod="openstack/nova-cell0-conductor-db-sync-glpkn" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.207179 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/26e5617b-90b9-451b-a0a1-6f43d885ab38-scripts\") pod \"nova-cell0-conductor-db-sync-glpkn\" (UID: \"26e5617b-90b9-451b-a0a1-6f43d885ab38\") " pod="openstack/nova-cell0-conductor-db-sync-glpkn" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.207223 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26e5617b-90b9-451b-a0a1-6f43d885ab38-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-glpkn\" (UID: 
\"26e5617b-90b9-451b-a0a1-6f43d885ab38\") " pod="openstack/nova-cell0-conductor-db-sync-glpkn" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.207277 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqw4n\" (UniqueName: \"kubernetes.io/projected/26e5617b-90b9-451b-a0a1-6f43d885ab38-kube-api-access-fqw4n\") pod \"nova-cell0-conductor-db-sync-glpkn\" (UID: \"26e5617b-90b9-451b-a0a1-6f43d885ab38\") " pod="openstack/nova-cell0-conductor-db-sync-glpkn" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.211456 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26e5617b-90b9-451b-a0a1-6f43d885ab38-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-glpkn\" (UID: \"26e5617b-90b9-451b-a0a1-6f43d885ab38\") " pod="openstack/nova-cell0-conductor-db-sync-glpkn" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.211717 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/26e5617b-90b9-451b-a0a1-6f43d885ab38-scripts\") pod \"nova-cell0-conductor-db-sync-glpkn\" (UID: \"26e5617b-90b9-451b-a0a1-6f43d885ab38\") " pod="openstack/nova-cell0-conductor-db-sync-glpkn" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.216400 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26e5617b-90b9-451b-a0a1-6f43d885ab38-config-data\") pod \"nova-cell0-conductor-db-sync-glpkn\" (UID: \"26e5617b-90b9-451b-a0a1-6f43d885ab38\") " pod="openstack/nova-cell0-conductor-db-sync-glpkn" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.227268 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fqw4n\" (UniqueName: \"kubernetes.io/projected/26e5617b-90b9-451b-a0a1-6f43d885ab38-kube-api-access-fqw4n\") pod \"nova-cell0-conductor-db-sync-glpkn\" (UID: \"26e5617b-90b9-451b-a0a1-6f43d885ab38\") " pod="openstack/nova-cell0-conductor-db-sync-glpkn" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.328068 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a","Type":"ContainerDied","Data":"1cceb2977a103b0010ed3dcfa107b2de5ff3106954a64b15b57636fa3f37441d"} Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.328116 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.328116 4784 scope.go:117] "RemoveContainer" containerID="6b0b59669c7fe1b85d646c7829f5bc79249a0a863f46a0aafdb0c060a663606d" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.332531 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-glpkn" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.360305 4784 scope.go:117] "RemoveContainer" containerID="5fd48bf457e80858858a56106210db056bc8c128a2acc4a4203aaa761523354b" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.377252 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.386855 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.410349 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.415509 4784 scope.go:117] "RemoveContainer" containerID="2a89169972b180f481da8f5c97aa06ae28c5c3ef0bbcc4e9f7c97c3de073c8d0" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.420535 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.423867 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.424047 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.428685 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.481043 4784 scope.go:117] "RemoveContainer" containerID="db71b94d5262c5125beb4691a9db6daa8bdd39fdb8c7562ae7af96d05e171c57" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.518351 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sb56x\" (UniqueName: \"kubernetes.io/projected/a2f1352c-4d83-4d92-ab5e-f81457da7f57-kube-api-access-sb56x\") pod \"ceilometer-0\" (UID: \"a2f1352c-4d83-4d92-ab5e-f81457da7f57\") " pod="openstack/ceilometer-0" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.518691 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a2f1352c-4d83-4d92-ab5e-f81457da7f57-log-httpd\") pod \"ceilometer-0\" (UID: \"a2f1352c-4d83-4d92-ab5e-f81457da7f57\") " pod="openstack/ceilometer-0" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.518775 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2f1352c-4d83-4d92-ab5e-f81457da7f57-config-data\") pod \"ceilometer-0\" (UID: \"a2f1352c-4d83-4d92-ab5e-f81457da7f57\") " pod="openstack/ceilometer-0" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.518826 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a2f1352c-4d83-4d92-ab5e-f81457da7f57-scripts\") pod \"ceilometer-0\" (UID: \"a2f1352c-4d83-4d92-ab5e-f81457da7f57\") " pod="openstack/ceilometer-0" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.518859 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a2f1352c-4d83-4d92-ab5e-f81457da7f57-run-httpd\") pod \"ceilometer-0\" (UID: \"a2f1352c-4d83-4d92-ab5e-f81457da7f57\") " pod="openstack/ceilometer-0" Dec 05 
12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.518891 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2f1352c-4d83-4d92-ab5e-f81457da7f57-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a2f1352c-4d83-4d92-ab5e-f81457da7f57\") " pod="openstack/ceilometer-0" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.518928 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a2f1352c-4d83-4d92-ab5e-f81457da7f57-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a2f1352c-4d83-4d92-ab5e-f81457da7f57\") " pod="openstack/ceilometer-0" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.621405 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a2f1352c-4d83-4d92-ab5e-f81457da7f57-scripts\") pod \"ceilometer-0\" (UID: \"a2f1352c-4d83-4d92-ab5e-f81457da7f57\") " pod="openstack/ceilometer-0" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.621445 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a2f1352c-4d83-4d92-ab5e-f81457da7f57-run-httpd\") pod \"ceilometer-0\" (UID: \"a2f1352c-4d83-4d92-ab5e-f81457da7f57\") " pod="openstack/ceilometer-0" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.621476 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2f1352c-4d83-4d92-ab5e-f81457da7f57-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a2f1352c-4d83-4d92-ab5e-f81457da7f57\") " pod="openstack/ceilometer-0" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.621705 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a2f1352c-4d83-4d92-ab5e-f81457da7f57-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a2f1352c-4d83-4d92-ab5e-f81457da7f57\") " pod="openstack/ceilometer-0" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.621735 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sb56x\" (UniqueName: \"kubernetes.io/projected/a2f1352c-4d83-4d92-ab5e-f81457da7f57-kube-api-access-sb56x\") pod \"ceilometer-0\" (UID: \"a2f1352c-4d83-4d92-ab5e-f81457da7f57\") " pod="openstack/ceilometer-0" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.621819 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a2f1352c-4d83-4d92-ab5e-f81457da7f57-log-httpd\") pod \"ceilometer-0\" (UID: \"a2f1352c-4d83-4d92-ab5e-f81457da7f57\") " pod="openstack/ceilometer-0" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.621895 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2f1352c-4d83-4d92-ab5e-f81457da7f57-config-data\") pod \"ceilometer-0\" (UID: \"a2f1352c-4d83-4d92-ab5e-f81457da7f57\") " pod="openstack/ceilometer-0" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.624419 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a2f1352c-4d83-4d92-ab5e-f81457da7f57-run-httpd\") pod \"ceilometer-0\" (UID: \"a2f1352c-4d83-4d92-ab5e-f81457da7f57\") " 
pod="openstack/ceilometer-0" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.624977 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a2f1352c-4d83-4d92-ab5e-f81457da7f57-log-httpd\") pod \"ceilometer-0\" (UID: \"a2f1352c-4d83-4d92-ab5e-f81457da7f57\") " pod="openstack/ceilometer-0" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.629639 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2f1352c-4d83-4d92-ab5e-f81457da7f57-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a2f1352c-4d83-4d92-ab5e-f81457da7f57\") " pod="openstack/ceilometer-0" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.629755 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2f1352c-4d83-4d92-ab5e-f81457da7f57-config-data\") pod \"ceilometer-0\" (UID: \"a2f1352c-4d83-4d92-ab5e-f81457da7f57\") " pod="openstack/ceilometer-0" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.629763 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a2f1352c-4d83-4d92-ab5e-f81457da7f57-scripts\") pod \"ceilometer-0\" (UID: \"a2f1352c-4d83-4d92-ab5e-f81457da7f57\") " pod="openstack/ceilometer-0" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.632700 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a2f1352c-4d83-4d92-ab5e-f81457da7f57-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a2f1352c-4d83-4d92-ab5e-f81457da7f57\") " pod="openstack/ceilometer-0" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.649777 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sb56x\" (UniqueName: \"kubernetes.io/projected/a2f1352c-4d83-4d92-ab5e-f81457da7f57-kube-api-access-sb56x\") pod \"ceilometer-0\" (UID: \"a2f1352c-4d83-4d92-ab5e-f81457da7f57\") " pod="openstack/ceilometer-0" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.781914 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:47:22 crc kubenswrapper[4784]: I1205 12:47:22.930594 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-glpkn"] Dec 05 12:47:23 crc kubenswrapper[4784]: I1205 12:47:23.031001 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a" path="/var/lib/kubelet/pods/b6e4d90c-41c9-47ea-9d4b-dfdd323fd97a/volumes" Dec 05 12:47:23 crc kubenswrapper[4784]: I1205 12:47:23.331702 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:47:23 crc kubenswrapper[4784]: I1205 12:47:23.358415 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-glpkn" event={"ID":"26e5617b-90b9-451b-a0a1-6f43d885ab38","Type":"ContainerStarted","Data":"a7e96d4df90740b26c0a940952becbbf94eaf1c44e090ba1278db78035a9b201"} Dec 05 12:47:24 crc kubenswrapper[4784]: I1205 12:47:24.379165 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a2f1352c-4d83-4d92-ab5e-f81457da7f57","Type":"ContainerStarted","Data":"0263c322a90da2a28ca135e91605fbba5b50b37ebc328552c6fdf6d25d1a1e8c"} Dec 05 12:47:24 crc kubenswrapper[4784]: I1205 12:47:24.379658 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a2f1352c-4d83-4d92-ab5e-f81457da7f57","Type":"ContainerStarted","Data":"77ed8b68f4b7dfcf165a9c047c746998d25332523c092e2e1e8757616f5878a2"} Dec 05 12:47:24 crc kubenswrapper[4784]: I1205 12:47:24.379671 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a2f1352c-4d83-4d92-ab5e-f81457da7f57","Type":"ContainerStarted","Data":"8a9db368e5abe464d5079bb826e5859efa3c6245515805901602614bf33e2be5"} Dec 05 12:47:25 crc kubenswrapper[4784]: I1205 12:47:25.377220 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:47:25 crc kubenswrapper[4784]: I1205 12:47:25.402868 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a2f1352c-4d83-4d92-ab5e-f81457da7f57","Type":"ContainerStarted","Data":"05a25b5c3c732338031f6b50d7e775ac3dc3ec7d16db9aa1cbcc2e9920c895b6"} Dec 05 12:47:26 crc kubenswrapper[4784]: I1205 12:47:26.413942 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a2f1352c-4d83-4d92-ab5e-f81457da7f57","Type":"ContainerStarted","Data":"0f812b6f667919326683ec776a206cc3f519d20668b9a36474214af552caa6b8"} Dec 05 12:47:26 crc kubenswrapper[4784]: I1205 12:47:26.414486 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 12:47:26 crc kubenswrapper[4784]: I1205 12:47:26.414251 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a2f1352c-4d83-4d92-ab5e-f81457da7f57" containerName="proxy-httpd" containerID="cri-o://0f812b6f667919326683ec776a206cc3f519d20668b9a36474214af552caa6b8" gracePeriod=30 Dec 05 12:47:26 crc kubenswrapper[4784]: I1205 12:47:26.414138 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a2f1352c-4d83-4d92-ab5e-f81457da7f57" containerName="ceilometer-central-agent" containerID="cri-o://77ed8b68f4b7dfcf165a9c047c746998d25332523c092e2e1e8757616f5878a2" gracePeriod=30 Dec 05 12:47:26 crc kubenswrapper[4784]: I1205 12:47:26.414287 4784 kuberuntime_container.go:808] "Killing 
container with a grace period" pod="openstack/ceilometer-0" podUID="a2f1352c-4d83-4d92-ab5e-f81457da7f57" containerName="ceilometer-notification-agent" containerID="cri-o://0263c322a90da2a28ca135e91605fbba5b50b37ebc328552c6fdf6d25d1a1e8c" gracePeriod=30 Dec 05 12:47:26 crc kubenswrapper[4784]: I1205 12:47:26.414365 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a2f1352c-4d83-4d92-ab5e-f81457da7f57" containerName="sg-core" containerID="cri-o://05a25b5c3c732338031f6b50d7e775ac3dc3ec7d16db9aa1cbcc2e9920c895b6" gracePeriod=30 Dec 05 12:47:26 crc kubenswrapper[4784]: I1205 12:47:26.436847 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.893720839 podStartE2EDuration="4.436826945s" podCreationTimestamp="2025-12-05 12:47:22 +0000 UTC" firstStartedPulling="2025-12-05 12:47:23.339889326 +0000 UTC m=+1322.759956141" lastFinishedPulling="2025-12-05 12:47:25.882995442 +0000 UTC m=+1325.303062247" observedRunningTime="2025-12-05 12:47:26.432738327 +0000 UTC m=+1325.852805162" watchObservedRunningTime="2025-12-05 12:47:26.436826945 +0000 UTC m=+1325.856893750" Dec 05 12:47:27 crc kubenswrapper[4784]: I1205 12:47:27.001375 4784 scope.go:117] "RemoveContainer" containerID="e39f25af97f8feb40ef61f3a8f1111a8e458c3f128fe65b6d33e77e3bfe59b7c" Dec 05 12:47:27 crc kubenswrapper[4784]: I1205 12:47:27.428062 4784 generic.go:334] "Generic (PLEG): container finished" podID="a2f1352c-4d83-4d92-ab5e-f81457da7f57" containerID="05a25b5c3c732338031f6b50d7e775ac3dc3ec7d16db9aa1cbcc2e9920c895b6" exitCode=2 Dec 05 12:47:27 crc kubenswrapper[4784]: I1205 12:47:27.428103 4784 generic.go:334] "Generic (PLEG): container finished" podID="a2f1352c-4d83-4d92-ab5e-f81457da7f57" containerID="0263c322a90da2a28ca135e91605fbba5b50b37ebc328552c6fdf6d25d1a1e8c" exitCode=0 Dec 05 12:47:27 crc kubenswrapper[4784]: I1205 12:47:27.428158 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a2f1352c-4d83-4d92-ab5e-f81457da7f57","Type":"ContainerDied","Data":"05a25b5c3c732338031f6b50d7e775ac3dc3ec7d16db9aa1cbcc2e9920c895b6"} Dec 05 12:47:27 crc kubenswrapper[4784]: I1205 12:47:27.428191 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a2f1352c-4d83-4d92-ab5e-f81457da7f57","Type":"ContainerDied","Data":"0263c322a90da2a28ca135e91605fbba5b50b37ebc328552c6fdf6d25d1a1e8c"} Dec 05 12:47:27 crc kubenswrapper[4784]: I1205 12:47:27.430368 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"9ce21c3e-07a5-4404-827e-367acaba9d66","Type":"ContainerStarted","Data":"0b52303426c0de0cd78589f5c11f393312bbcd3f270beb762e6eae7ea58e2fc1"} Dec 05 12:47:29 crc kubenswrapper[4784]: I1205 12:47:29.572173 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:47:29 crc kubenswrapper[4784]: I1205 12:47:29.572793 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 
12:47:31 crc kubenswrapper[4784]: I1205 12:47:31.092716 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 12:47:31 crc kubenswrapper[4784]: I1205 12:47:31.093761 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="c4ff7099-6040-47b5-b8bd-3951cbd08109" containerName="glance-log" containerID="cri-o://231f4e7974ee1b7a58afbcd6f8df44026e1162ed5902112b2eb4d27b80811787" gracePeriod=30 Dec 05 12:47:31 crc kubenswrapper[4784]: I1205 12:47:31.094133 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="c4ff7099-6040-47b5-b8bd-3951cbd08109" containerName="glance-httpd" containerID="cri-o://88d97f20401aabed2d2b719684b04a0fb86fb73778e11c78d156f31d4e295249" gracePeriod=30 Dec 05 12:47:31 crc kubenswrapper[4784]: I1205 12:47:31.498032 4784 generic.go:334] "Generic (PLEG): container finished" podID="c4ff7099-6040-47b5-b8bd-3951cbd08109" containerID="231f4e7974ee1b7a58afbcd6f8df44026e1162ed5902112b2eb4d27b80811787" exitCode=143 Dec 05 12:47:31 crc kubenswrapper[4784]: I1205 12:47:31.498110 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c4ff7099-6040-47b5-b8bd-3951cbd08109","Type":"ContainerDied","Data":"231f4e7974ee1b7a58afbcd6f8df44026e1162ed5902112b2eb4d27b80811787"} Dec 05 12:47:32 crc kubenswrapper[4784]: I1205 12:47:32.314377 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Dec 05 12:47:32 crc kubenswrapper[4784]: I1205 12:47:32.314438 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-decision-engine-0" Dec 05 12:47:32 crc kubenswrapper[4784]: I1205 12:47:32.347017 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-decision-engine-0" Dec 05 12:47:32 crc kubenswrapper[4784]: I1205 12:47:32.549819 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-decision-engine-0" Dec 05 12:47:33 crc kubenswrapper[4784]: I1205 12:47:33.330546 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-decision-engine-0"] Dec 05 12:47:33 crc kubenswrapper[4784]: I1205 12:47:33.525240 4784 generic.go:334] "Generic (PLEG): container finished" podID="a2f1352c-4d83-4d92-ab5e-f81457da7f57" containerID="77ed8b68f4b7dfcf165a9c047c746998d25332523c092e2e1e8757616f5878a2" exitCode=0 Dec 05 12:47:33 crc kubenswrapper[4784]: I1205 12:47:33.525346 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a2f1352c-4d83-4d92-ab5e-f81457da7f57","Type":"ContainerDied","Data":"77ed8b68f4b7dfcf165a9c047c746998d25332523c092e2e1e8757616f5878a2"} Dec 05 12:47:33 crc kubenswrapper[4784]: I1205 12:47:33.528297 4784 generic.go:334] "Generic (PLEG): container finished" podID="c4ff7099-6040-47b5-b8bd-3951cbd08109" containerID="88d97f20401aabed2d2b719684b04a0fb86fb73778e11c78d156f31d4e295249" exitCode=0 Dec 05 12:47:33 crc kubenswrapper[4784]: I1205 12:47:33.528373 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c4ff7099-6040-47b5-b8bd-3951cbd08109","Type":"ContainerDied","Data":"88d97f20401aabed2d2b719684b04a0fb86fb73778e11c78d156f31d4e295249"} Dec 05 12:47:34 crc kubenswrapper[4784]: I1205 12:47:34.538359 4784 kuberuntime_container.go:808] "Killing 
container with a grace period" pod="openstack/watcher-decision-engine-0" podUID="9ce21c3e-07a5-4404-827e-367acaba9d66" containerName="watcher-decision-engine" containerID="cri-o://0b52303426c0de0cd78589f5c11f393312bbcd3f270beb762e6eae7ea58e2fc1" gracePeriod=30 Dec 05 12:47:34 crc kubenswrapper[4784]: I1205 12:47:34.878722 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 12:47:34 crc kubenswrapper[4784]: I1205 12:47:34.879328 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="eeba033c-75f0-4528-b3ee-13bab8f9669c" containerName="glance-log" containerID="cri-o://570eb15bffb9b17b6eabb60d2c9dc61d93e9ae20ec1868516f6b6eebb2504f9c" gracePeriod=30 Dec 05 12:47:34 crc kubenswrapper[4784]: I1205 12:47:34.879795 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="eeba033c-75f0-4528-b3ee-13bab8f9669c" containerName="glance-httpd" containerID="cri-o://47ebf0103ed665e5df4b174bfa2cfac19a6804c39a76fda9244314dbb7d47bff" gracePeriod=30 Dec 05 12:47:35 crc kubenswrapper[4784]: I1205 12:47:35.394921 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 12:47:35 crc kubenswrapper[4784]: I1205 12:47:35.489357 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4ff7099-6040-47b5-b8bd-3951cbd08109-combined-ca-bundle\") pod \"c4ff7099-6040-47b5-b8bd-3951cbd08109\" (UID: \"c4ff7099-6040-47b5-b8bd-3951cbd08109\") " Dec 05 12:47:35 crc kubenswrapper[4784]: I1205 12:47:35.489694 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-92k5g\" (UniqueName: \"kubernetes.io/projected/c4ff7099-6040-47b5-b8bd-3951cbd08109-kube-api-access-92k5g\") pod \"c4ff7099-6040-47b5-b8bd-3951cbd08109\" (UID: \"c4ff7099-6040-47b5-b8bd-3951cbd08109\") " Dec 05 12:47:35 crc kubenswrapper[4784]: I1205 12:47:35.489879 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"c4ff7099-6040-47b5-b8bd-3951cbd08109\" (UID: \"c4ff7099-6040-47b5-b8bd-3951cbd08109\") " Dec 05 12:47:35 crc kubenswrapper[4784]: I1205 12:47:35.489931 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c4ff7099-6040-47b5-b8bd-3951cbd08109-httpd-run\") pod \"c4ff7099-6040-47b5-b8bd-3951cbd08109\" (UID: \"c4ff7099-6040-47b5-b8bd-3951cbd08109\") " Dec 05 12:47:35 crc kubenswrapper[4784]: I1205 12:47:35.489954 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c4ff7099-6040-47b5-b8bd-3951cbd08109-logs\") pod \"c4ff7099-6040-47b5-b8bd-3951cbd08109\" (UID: \"c4ff7099-6040-47b5-b8bd-3951cbd08109\") " Dec 05 12:47:35 crc kubenswrapper[4784]: I1205 12:47:35.489974 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c4ff7099-6040-47b5-b8bd-3951cbd08109-scripts\") pod \"c4ff7099-6040-47b5-b8bd-3951cbd08109\" (UID: \"c4ff7099-6040-47b5-b8bd-3951cbd08109\") " Dec 05 12:47:35 crc kubenswrapper[4784]: I1205 12:47:35.490024 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/c4ff7099-6040-47b5-b8bd-3951cbd08109-config-data\") pod \"c4ff7099-6040-47b5-b8bd-3951cbd08109\" (UID: \"c4ff7099-6040-47b5-b8bd-3951cbd08109\") " Dec 05 12:47:35 crc kubenswrapper[4784]: I1205 12:47:35.490049 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4ff7099-6040-47b5-b8bd-3951cbd08109-public-tls-certs\") pod \"c4ff7099-6040-47b5-b8bd-3951cbd08109\" (UID: \"c4ff7099-6040-47b5-b8bd-3951cbd08109\") " Dec 05 12:47:35 crc kubenswrapper[4784]: I1205 12:47:35.491691 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c4ff7099-6040-47b5-b8bd-3951cbd08109-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "c4ff7099-6040-47b5-b8bd-3951cbd08109" (UID: "c4ff7099-6040-47b5-b8bd-3951cbd08109"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:47:35 crc kubenswrapper[4784]: I1205 12:47:35.492273 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c4ff7099-6040-47b5-b8bd-3951cbd08109-logs" (OuterVolumeSpecName: "logs") pod "c4ff7099-6040-47b5-b8bd-3951cbd08109" (UID: "c4ff7099-6040-47b5-b8bd-3951cbd08109"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:47:35 crc kubenswrapper[4784]: I1205 12:47:35.493014 4784 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c4ff7099-6040-47b5-b8bd-3951cbd08109-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:35 crc kubenswrapper[4784]: I1205 12:47:35.493052 4784 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c4ff7099-6040-47b5-b8bd-3951cbd08109-logs\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:35 crc kubenswrapper[4784]: I1205 12:47:35.495868 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4ff7099-6040-47b5-b8bd-3951cbd08109-kube-api-access-92k5g" (OuterVolumeSpecName: "kube-api-access-92k5g") pod "c4ff7099-6040-47b5-b8bd-3951cbd08109" (UID: "c4ff7099-6040-47b5-b8bd-3951cbd08109"). InnerVolumeSpecName "kube-api-access-92k5g". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:47:35 crc kubenswrapper[4784]: I1205 12:47:35.505524 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4ff7099-6040-47b5-b8bd-3951cbd08109-scripts" (OuterVolumeSpecName: "scripts") pod "c4ff7099-6040-47b5-b8bd-3951cbd08109" (UID: "c4ff7099-6040-47b5-b8bd-3951cbd08109"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:47:35 crc kubenswrapper[4784]: I1205 12:47:35.505707 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "c4ff7099-6040-47b5-b8bd-3951cbd08109" (UID: "c4ff7099-6040-47b5-b8bd-3951cbd08109"). InnerVolumeSpecName "local-storage10-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 12:47:35 crc kubenswrapper[4784]: I1205 12:47:35.576313 4784 generic.go:334] "Generic (PLEG): container finished" podID="eeba033c-75f0-4528-b3ee-13bab8f9669c" containerID="570eb15bffb9b17b6eabb60d2c9dc61d93e9ae20ec1868516f6b6eebb2504f9c" exitCode=143 Dec 05 12:47:35 crc kubenswrapper[4784]: I1205 12:47:35.576418 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"eeba033c-75f0-4528-b3ee-13bab8f9669c","Type":"ContainerDied","Data":"570eb15bffb9b17b6eabb60d2c9dc61d93e9ae20ec1868516f6b6eebb2504f9c"} Dec 05 12:47:35 crc kubenswrapper[4784]: I1205 12:47:35.584301 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4ff7099-6040-47b5-b8bd-3951cbd08109-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c4ff7099-6040-47b5-b8bd-3951cbd08109" (UID: "c4ff7099-6040-47b5-b8bd-3951cbd08109"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:47:35 crc kubenswrapper[4784]: I1205 12:47:35.585787 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c4ff7099-6040-47b5-b8bd-3951cbd08109","Type":"ContainerDied","Data":"07d80b8e8c5a0e1b3d7a95c3b9c3e904f7b6e6451fbf0a5668a87778e7605013"} Dec 05 12:47:35 crc kubenswrapper[4784]: I1205 12:47:35.585861 4784 scope.go:117] "RemoveContainer" containerID="88d97f20401aabed2d2b719684b04a0fb86fb73778e11c78d156f31d4e295249" Dec 05 12:47:35 crc kubenswrapper[4784]: I1205 12:47:35.586070 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 12:47:35 crc kubenswrapper[4784]: I1205 12:47:35.594728 4784 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Dec 05 12:47:35 crc kubenswrapper[4784]: I1205 12:47:35.594761 4784 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c4ff7099-6040-47b5-b8bd-3951cbd08109-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:35 crc kubenswrapper[4784]: I1205 12:47:35.594775 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4ff7099-6040-47b5-b8bd-3951cbd08109-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:35 crc kubenswrapper[4784]: I1205 12:47:35.594789 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-92k5g\" (UniqueName: \"kubernetes.io/projected/c4ff7099-6040-47b5-b8bd-3951cbd08109-kube-api-access-92k5g\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:35 crc kubenswrapper[4784]: I1205 12:47:35.618182 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4ff7099-6040-47b5-b8bd-3951cbd08109-config-data" (OuterVolumeSpecName: "config-data") pod "c4ff7099-6040-47b5-b8bd-3951cbd08109" (UID: "c4ff7099-6040-47b5-b8bd-3951cbd08109"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:47:35 crc kubenswrapper[4784]: I1205 12:47:35.632507 4784 scope.go:117] "RemoveContainer" containerID="231f4e7974ee1b7a58afbcd6f8df44026e1162ed5902112b2eb4d27b80811787" Dec 05 12:47:35 crc kubenswrapper[4784]: I1205 12:47:35.642226 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4ff7099-6040-47b5-b8bd-3951cbd08109-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "c4ff7099-6040-47b5-b8bd-3951cbd08109" (UID: "c4ff7099-6040-47b5-b8bd-3951cbd08109"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:47:35 crc kubenswrapper[4784]: I1205 12:47:35.666897 4784 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Dec 05 12:47:35 crc kubenswrapper[4784]: I1205 12:47:35.696113 4784 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:35 crc kubenswrapper[4784]: I1205 12:47:35.696145 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4ff7099-6040-47b5-b8bd-3951cbd08109-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:35 crc kubenswrapper[4784]: I1205 12:47:35.696154 4784 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4ff7099-6040-47b5-b8bd-3951cbd08109-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:35 crc kubenswrapper[4784]: I1205 12:47:35.927213 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 12:47:35 crc kubenswrapper[4784]: I1205 12:47:35.937007 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 12:47:35 crc kubenswrapper[4784]: I1205 12:47:35.955163 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 12:47:35 crc kubenswrapper[4784]: E1205 12:47:35.955574 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4ff7099-6040-47b5-b8bd-3951cbd08109" containerName="glance-httpd" Dec 05 12:47:35 crc kubenswrapper[4784]: I1205 12:47:35.955590 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4ff7099-6040-47b5-b8bd-3951cbd08109" containerName="glance-httpd" Dec 05 12:47:35 crc kubenswrapper[4784]: E1205 12:47:35.955605 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4ff7099-6040-47b5-b8bd-3951cbd08109" containerName="glance-log" Dec 05 12:47:35 crc kubenswrapper[4784]: I1205 12:47:35.955613 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4ff7099-6040-47b5-b8bd-3951cbd08109" containerName="glance-log" Dec 05 12:47:35 crc kubenswrapper[4784]: I1205 12:47:35.955796 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4ff7099-6040-47b5-b8bd-3951cbd08109" containerName="glance-log" Dec 05 12:47:35 crc kubenswrapper[4784]: I1205 12:47:35.955824 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4ff7099-6040-47b5-b8bd-3951cbd08109" containerName="glance-httpd" Dec 05 12:47:35 crc kubenswrapper[4784]: I1205 12:47:35.956801 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 12:47:35 crc kubenswrapper[4784]: I1205 12:47:35.958827 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 05 12:47:35 crc kubenswrapper[4784]: I1205 12:47:35.959302 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 05 12:47:35 crc kubenswrapper[4784]: I1205 12:47:35.967960 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 12:47:36 crc kubenswrapper[4784]: I1205 12:47:36.001696 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/60863f45-1c7b-4e86-8782-aece4b178edb-logs\") pod \"glance-default-external-api-0\" (UID: \"60863f45-1c7b-4e86-8782-aece4b178edb\") " pod="openstack/glance-default-external-api-0" Dec 05 12:47:36 crc kubenswrapper[4784]: I1205 12:47:36.001743 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/60863f45-1c7b-4e86-8782-aece4b178edb-scripts\") pod \"glance-default-external-api-0\" (UID: \"60863f45-1c7b-4e86-8782-aece4b178edb\") " pod="openstack/glance-default-external-api-0" Dec 05 12:47:36 crc kubenswrapper[4784]: I1205 12:47:36.001802 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/60863f45-1c7b-4e86-8782-aece4b178edb-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"60863f45-1c7b-4e86-8782-aece4b178edb\") " pod="openstack/glance-default-external-api-0" Dec 05 12:47:36 crc kubenswrapper[4784]: I1205 12:47:36.001830 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60863f45-1c7b-4e86-8782-aece4b178edb-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"60863f45-1c7b-4e86-8782-aece4b178edb\") " pod="openstack/glance-default-external-api-0" Dec 05 12:47:36 crc kubenswrapper[4784]: I1205 12:47:36.001946 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/60863f45-1c7b-4e86-8782-aece4b178edb-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"60863f45-1c7b-4e86-8782-aece4b178edb\") " pod="openstack/glance-default-external-api-0" Dec 05 12:47:36 crc kubenswrapper[4784]: I1205 12:47:36.001978 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60863f45-1c7b-4e86-8782-aece4b178edb-config-data\") pod \"glance-default-external-api-0\" (UID: \"60863f45-1c7b-4e86-8782-aece4b178edb\") " pod="openstack/glance-default-external-api-0" Dec 05 12:47:36 crc kubenswrapper[4784]: I1205 12:47:36.002035 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-966vx\" (UniqueName: \"kubernetes.io/projected/60863f45-1c7b-4e86-8782-aece4b178edb-kube-api-access-966vx\") pod \"glance-default-external-api-0\" (UID: \"60863f45-1c7b-4e86-8782-aece4b178edb\") " pod="openstack/glance-default-external-api-0" Dec 05 12:47:36 crc kubenswrapper[4784]: I1205 12:47:36.002201 4784 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"60863f45-1c7b-4e86-8782-aece4b178edb\") " pod="openstack/glance-default-external-api-0" Dec 05 12:47:36 crc kubenswrapper[4784]: I1205 12:47:36.103879 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/60863f45-1c7b-4e86-8782-aece4b178edb-logs\") pod \"glance-default-external-api-0\" (UID: \"60863f45-1c7b-4e86-8782-aece4b178edb\") " pod="openstack/glance-default-external-api-0" Dec 05 12:47:36 crc kubenswrapper[4784]: I1205 12:47:36.103929 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/60863f45-1c7b-4e86-8782-aece4b178edb-scripts\") pod \"glance-default-external-api-0\" (UID: \"60863f45-1c7b-4e86-8782-aece4b178edb\") " pod="openstack/glance-default-external-api-0" Dec 05 12:47:36 crc kubenswrapper[4784]: I1205 12:47:36.103955 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/60863f45-1c7b-4e86-8782-aece4b178edb-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"60863f45-1c7b-4e86-8782-aece4b178edb\") " pod="openstack/glance-default-external-api-0" Dec 05 12:47:36 crc kubenswrapper[4784]: I1205 12:47:36.103982 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60863f45-1c7b-4e86-8782-aece4b178edb-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"60863f45-1c7b-4e86-8782-aece4b178edb\") " pod="openstack/glance-default-external-api-0" Dec 05 12:47:36 crc kubenswrapper[4784]: I1205 12:47:36.104082 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/60863f45-1c7b-4e86-8782-aece4b178edb-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"60863f45-1c7b-4e86-8782-aece4b178edb\") " pod="openstack/glance-default-external-api-0" Dec 05 12:47:36 crc kubenswrapper[4784]: I1205 12:47:36.104113 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60863f45-1c7b-4e86-8782-aece4b178edb-config-data\") pod \"glance-default-external-api-0\" (UID: \"60863f45-1c7b-4e86-8782-aece4b178edb\") " pod="openstack/glance-default-external-api-0" Dec 05 12:47:36 crc kubenswrapper[4784]: I1205 12:47:36.104161 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-966vx\" (UniqueName: \"kubernetes.io/projected/60863f45-1c7b-4e86-8782-aece4b178edb-kube-api-access-966vx\") pod \"glance-default-external-api-0\" (UID: \"60863f45-1c7b-4e86-8782-aece4b178edb\") " pod="openstack/glance-default-external-api-0" Dec 05 12:47:36 crc kubenswrapper[4784]: I1205 12:47:36.104222 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"60863f45-1c7b-4e86-8782-aece4b178edb\") " pod="openstack/glance-default-external-api-0" Dec 05 12:47:36 crc kubenswrapper[4784]: I1205 12:47:36.104709 4784 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"60863f45-1c7b-4e86-8782-aece4b178edb\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-external-api-0" Dec 05 12:47:36 crc kubenswrapper[4784]: I1205 12:47:36.105529 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/60863f45-1c7b-4e86-8782-aece4b178edb-logs\") pod \"glance-default-external-api-0\" (UID: \"60863f45-1c7b-4e86-8782-aece4b178edb\") " pod="openstack/glance-default-external-api-0" Dec 05 12:47:36 crc kubenswrapper[4784]: I1205 12:47:36.105534 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/60863f45-1c7b-4e86-8782-aece4b178edb-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"60863f45-1c7b-4e86-8782-aece4b178edb\") " pod="openstack/glance-default-external-api-0" Dec 05 12:47:36 crc kubenswrapper[4784]: I1205 12:47:36.109508 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60863f45-1c7b-4e86-8782-aece4b178edb-config-data\") pod \"glance-default-external-api-0\" (UID: \"60863f45-1c7b-4e86-8782-aece4b178edb\") " pod="openstack/glance-default-external-api-0" Dec 05 12:47:36 crc kubenswrapper[4784]: I1205 12:47:36.110216 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/60863f45-1c7b-4e86-8782-aece4b178edb-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"60863f45-1c7b-4e86-8782-aece4b178edb\") " pod="openstack/glance-default-external-api-0" Dec 05 12:47:36 crc kubenswrapper[4784]: I1205 12:47:36.110366 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60863f45-1c7b-4e86-8782-aece4b178edb-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"60863f45-1c7b-4e86-8782-aece4b178edb\") " pod="openstack/glance-default-external-api-0" Dec 05 12:47:36 crc kubenswrapper[4784]: I1205 12:47:36.115811 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/60863f45-1c7b-4e86-8782-aece4b178edb-scripts\") pod \"glance-default-external-api-0\" (UID: \"60863f45-1c7b-4e86-8782-aece4b178edb\") " pod="openstack/glance-default-external-api-0" Dec 05 12:47:36 crc kubenswrapper[4784]: I1205 12:47:36.130659 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-966vx\" (UniqueName: \"kubernetes.io/projected/60863f45-1c7b-4e86-8782-aece4b178edb-kube-api-access-966vx\") pod \"glance-default-external-api-0\" (UID: \"60863f45-1c7b-4e86-8782-aece4b178edb\") " pod="openstack/glance-default-external-api-0" Dec 05 12:47:36 crc kubenswrapper[4784]: I1205 12:47:36.155368 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"60863f45-1c7b-4e86-8782-aece4b178edb\") " pod="openstack/glance-default-external-api-0" Dec 05 12:47:36 crc kubenswrapper[4784]: I1205 12:47:36.288885 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 12:47:36 crc kubenswrapper[4784]: I1205 12:47:36.606486 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-glpkn" event={"ID":"26e5617b-90b9-451b-a0a1-6f43d885ab38","Type":"ContainerStarted","Data":"602aae4c1761197ed50d137a50507c7c6bfbc79715901569be87bb7d536fdc47"} Dec 05 12:47:36 crc kubenswrapper[4784]: I1205 12:47:36.626457 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-glpkn" podStartSLOduration=3.614641963 podStartE2EDuration="15.626438753s" podCreationTimestamp="2025-12-05 12:47:21 +0000 UTC" firstStartedPulling="2025-12-05 12:47:22.941355766 +0000 UTC m=+1322.361422581" lastFinishedPulling="2025-12-05 12:47:34.953152566 +0000 UTC m=+1334.373219371" observedRunningTime="2025-12-05 12:47:36.625120843 +0000 UTC m=+1336.045187658" watchObservedRunningTime="2025-12-05 12:47:36.626438753 +0000 UTC m=+1336.046505578" Dec 05 12:47:36 crc kubenswrapper[4784]: I1205 12:47:36.945374 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.034235 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c4ff7099-6040-47b5-b8bd-3951cbd08109" path="/var/lib/kubelet/pods/c4ff7099-6040-47b5-b8bd-3951cbd08109/volumes" Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.141412 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.329760 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"eeba033c-75f0-4528-b3ee-13bab8f9669c\" (UID: \"eeba033c-75f0-4528-b3ee-13bab8f9669c\") " Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.329828 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eeba033c-75f0-4528-b3ee-13bab8f9669c-logs\") pod \"eeba033c-75f0-4528-b3ee-13bab8f9669c\" (UID: \"eeba033c-75f0-4528-b3ee-13bab8f9669c\") " Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.329881 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/eeba033c-75f0-4528-b3ee-13bab8f9669c-internal-tls-certs\") pod \"eeba033c-75f0-4528-b3ee-13bab8f9669c\" (UID: \"eeba033c-75f0-4528-b3ee-13bab8f9669c\") " Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.329940 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/eeba033c-75f0-4528-b3ee-13bab8f9669c-httpd-run\") pod \"eeba033c-75f0-4528-b3ee-13bab8f9669c\" (UID: \"eeba033c-75f0-4528-b3ee-13bab8f9669c\") " Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.329955 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eeba033c-75f0-4528-b3ee-13bab8f9669c-combined-ca-bundle\") pod \"eeba033c-75f0-4528-b3ee-13bab8f9669c\" (UID: \"eeba033c-75f0-4528-b3ee-13bab8f9669c\") " Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.330004 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/eeba033c-75f0-4528-b3ee-13bab8f9669c-config-data\") pod \"eeba033c-75f0-4528-b3ee-13bab8f9669c\" (UID: \"eeba033c-75f0-4528-b3ee-13bab8f9669c\") " Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.330142 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9dhkh\" (UniqueName: \"kubernetes.io/projected/eeba033c-75f0-4528-b3ee-13bab8f9669c-kube-api-access-9dhkh\") pod \"eeba033c-75f0-4528-b3ee-13bab8f9669c\" (UID: \"eeba033c-75f0-4528-b3ee-13bab8f9669c\") " Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.330201 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eeba033c-75f0-4528-b3ee-13bab8f9669c-scripts\") pod \"eeba033c-75f0-4528-b3ee-13bab8f9669c\" (UID: \"eeba033c-75f0-4528-b3ee-13bab8f9669c\") " Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.331004 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eeba033c-75f0-4528-b3ee-13bab8f9669c-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "eeba033c-75f0-4528-b3ee-13bab8f9669c" (UID: "eeba033c-75f0-4528-b3ee-13bab8f9669c"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.331340 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eeba033c-75f0-4528-b3ee-13bab8f9669c-logs" (OuterVolumeSpecName: "logs") pod "eeba033c-75f0-4528-b3ee-13bab8f9669c" (UID: "eeba033c-75f0-4528-b3ee-13bab8f9669c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.348425 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eeba033c-75f0-4528-b3ee-13bab8f9669c-kube-api-access-9dhkh" (OuterVolumeSpecName: "kube-api-access-9dhkh") pod "eeba033c-75f0-4528-b3ee-13bab8f9669c" (UID: "eeba033c-75f0-4528-b3ee-13bab8f9669c"). InnerVolumeSpecName "kube-api-access-9dhkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.358595 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "glance") pod "eeba033c-75f0-4528-b3ee-13bab8f9669c" (UID: "eeba033c-75f0-4528-b3ee-13bab8f9669c"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.381838 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eeba033c-75f0-4528-b3ee-13bab8f9669c-scripts" (OuterVolumeSpecName: "scripts") pod "eeba033c-75f0-4528-b3ee-13bab8f9669c" (UID: "eeba033c-75f0-4528-b3ee-13bab8f9669c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.396958 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eeba033c-75f0-4528-b3ee-13bab8f9669c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "eeba033c-75f0-4528-b3ee-13bab8f9669c" (UID: "eeba033c-75f0-4528-b3ee-13bab8f9669c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.430602 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eeba033c-75f0-4528-b3ee-13bab8f9669c-config-data" (OuterVolumeSpecName: "config-data") pod "eeba033c-75f0-4528-b3ee-13bab8f9669c" (UID: "eeba033c-75f0-4528-b3ee-13bab8f9669c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.432434 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9dhkh\" (UniqueName: \"kubernetes.io/projected/eeba033c-75f0-4528-b3ee-13bab8f9669c-kube-api-access-9dhkh\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.432457 4784 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eeba033c-75f0-4528-b3ee-13bab8f9669c-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.432489 4784 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.432502 4784 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eeba033c-75f0-4528-b3ee-13bab8f9669c-logs\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.432510 4784 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/eeba033c-75f0-4528-b3ee-13bab8f9669c-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.432520 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eeba033c-75f0-4528-b3ee-13bab8f9669c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.432528 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eeba033c-75f0-4528-b3ee-13bab8f9669c-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.466765 4784 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.480429 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-decision-engine-0" Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.494586 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eeba033c-75f0-4528-b3ee-13bab8f9669c-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "eeba033c-75f0-4528-b3ee-13bab8f9669c" (UID: "eeba033c-75f0-4528-b3ee-13bab8f9669c"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.539663 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9ce21c3e-07a5-4404-827e-367acaba9d66-logs\") pod \"9ce21c3e-07a5-4404-827e-367acaba9d66\" (UID: \"9ce21c3e-07a5-4404-827e-367acaba9d66\") " Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.539804 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-75t69\" (UniqueName: \"kubernetes.io/projected/9ce21c3e-07a5-4404-827e-367acaba9d66-kube-api-access-75t69\") pod \"9ce21c3e-07a5-4404-827e-367acaba9d66\" (UID: \"9ce21c3e-07a5-4404-827e-367acaba9d66\") " Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.539859 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/9ce21c3e-07a5-4404-827e-367acaba9d66-custom-prometheus-ca\") pod \"9ce21c3e-07a5-4404-827e-367acaba9d66\" (UID: \"9ce21c3e-07a5-4404-827e-367acaba9d66\") " Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.539891 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ce21c3e-07a5-4404-827e-367acaba9d66-combined-ca-bundle\") pod \"9ce21c3e-07a5-4404-827e-367acaba9d66\" (UID: \"9ce21c3e-07a5-4404-827e-367acaba9d66\") " Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.540075 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ce21c3e-07a5-4404-827e-367acaba9d66-config-data\") pod \"9ce21c3e-07a5-4404-827e-367acaba9d66\" (UID: \"9ce21c3e-07a5-4404-827e-367acaba9d66\") " Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.559949 4784 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/eeba033c-75f0-4528-b3ee-13bab8f9669c-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.560269 4784 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.561589 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9ce21c3e-07a5-4404-827e-367acaba9d66-logs" (OuterVolumeSpecName: "logs") pod "9ce21c3e-07a5-4404-827e-367acaba9d66" (UID: "9ce21c3e-07a5-4404-827e-367acaba9d66"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.650027 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ce21c3e-07a5-4404-827e-367acaba9d66-kube-api-access-75t69" (OuterVolumeSpecName: "kube-api-access-75t69") pod "9ce21c3e-07a5-4404-827e-367acaba9d66" (UID: "9ce21c3e-07a5-4404-827e-367acaba9d66"). InnerVolumeSpecName "kube-api-access-75t69". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.654802 4784 generic.go:334] "Generic (PLEG): container finished" podID="9ce21c3e-07a5-4404-827e-367acaba9d66" containerID="0b52303426c0de0cd78589f5c11f393312bbcd3f270beb762e6eae7ea58e2fc1" exitCode=0 Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.654873 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"9ce21c3e-07a5-4404-827e-367acaba9d66","Type":"ContainerDied","Data":"0b52303426c0de0cd78589f5c11f393312bbcd3f270beb762e6eae7ea58e2fc1"} Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.654903 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"9ce21c3e-07a5-4404-827e-367acaba9d66","Type":"ContainerDied","Data":"17803a9f081def484ae402373f4e55e1b97e5c57dfdc49d839896b38b0fec8cc"} Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.654929 4784 scope.go:117] "RemoveContainer" containerID="0b52303426c0de0cd78589f5c11f393312bbcd3f270beb762e6eae7ea58e2fc1" Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.655081 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-decision-engine-0" Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.656830 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"60863f45-1c7b-4e86-8782-aece4b178edb","Type":"ContainerStarted","Data":"1d2b21414511c6c48af40649749957c9bc14d2aafe169e05f979c3d00d575818"} Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.666060 4784 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9ce21c3e-07a5-4404-827e-367acaba9d66-logs\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.666097 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-75t69\" (UniqueName: \"kubernetes.io/projected/9ce21c3e-07a5-4404-827e-367acaba9d66-kube-api-access-75t69\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.699771 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9ce21c3e-07a5-4404-827e-367acaba9d66-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9ce21c3e-07a5-4404-827e-367acaba9d66" (UID: "9ce21c3e-07a5-4404-827e-367acaba9d66"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.702527 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9ce21c3e-07a5-4404-827e-367acaba9d66-config-data" (OuterVolumeSpecName: "config-data") pod "9ce21c3e-07a5-4404-827e-367acaba9d66" (UID: "9ce21c3e-07a5-4404-827e-367acaba9d66"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.716356 4784 scope.go:117] "RemoveContainer" containerID="e39f25af97f8feb40ef61f3a8f1111a8e458c3f128fe65b6d33e77e3bfe59b7c"
Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.718603 4784 generic.go:334] "Generic (PLEG): container finished" podID="eeba033c-75f0-4528-b3ee-13bab8f9669c" containerID="47ebf0103ed665e5df4b174bfa2cfac19a6804c39a76fda9244314dbb7d47bff" exitCode=0
Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.719922 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.722351 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"eeba033c-75f0-4528-b3ee-13bab8f9669c","Type":"ContainerDied","Data":"47ebf0103ed665e5df4b174bfa2cfac19a6804c39a76fda9244314dbb7d47bff"}
Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.722383 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"eeba033c-75f0-4528-b3ee-13bab8f9669c","Type":"ContainerDied","Data":"14304b8e9e38f80bdb48ac4ab4c1df56f5463f7aac128e2baa25d06351958a50"}
Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.728352 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9ce21c3e-07a5-4404-827e-367acaba9d66-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "9ce21c3e-07a5-4404-827e-367acaba9d66" (UID: "9ce21c3e-07a5-4404-827e-367acaba9d66"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.769005 4784 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/9ce21c3e-07a5-4404-827e-367acaba9d66-custom-prometheus-ca\") on node \"crc\" DevicePath \"\""
Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.769036 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ce21c3e-07a5-4404-827e-367acaba9d66-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.769047 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ce21c3e-07a5-4404-827e-367acaba9d66-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.773980 4784 scope.go:117] "RemoveContainer" containerID="0b52303426c0de0cd78589f5c11f393312bbcd3f270beb762e6eae7ea58e2fc1"
Dec 05 12:47:37 crc kubenswrapper[4784]: E1205 12:47:37.775979 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b52303426c0de0cd78589f5c11f393312bbcd3f270beb762e6eae7ea58e2fc1\": container with ID starting with 0b52303426c0de0cd78589f5c11f393312bbcd3f270beb762e6eae7ea58e2fc1 not found: ID does not exist" containerID="0b52303426c0de0cd78589f5c11f393312bbcd3f270beb762e6eae7ea58e2fc1"
Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.776029 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b52303426c0de0cd78589f5c11f393312bbcd3f270beb762e6eae7ea58e2fc1"} err="failed to get container status \"0b52303426c0de0cd78589f5c11f393312bbcd3f270beb762e6eae7ea58e2fc1\": rpc error: code = NotFound desc = could not find container \"0b52303426c0de0cd78589f5c11f393312bbcd3f270beb762e6eae7ea58e2fc1\": container with ID starting with 0b52303426c0de0cd78589f5c11f393312bbcd3f270beb762e6eae7ea58e2fc1 not found: ID does not exist"
Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.776057 4784 scope.go:117] "RemoveContainer" containerID="e39f25af97f8feb40ef61f3a8f1111a8e458c3f128fe65b6d33e77e3bfe59b7c"
Dec 05 12:47:37 crc kubenswrapper[4784]: E1205 12:47:37.776606 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e39f25af97f8feb40ef61f3a8f1111a8e458c3f128fe65b6d33e77e3bfe59b7c\": container with ID starting with e39f25af97f8feb40ef61f3a8f1111a8e458c3f128fe65b6d33e77e3bfe59b7c not found: ID does not exist" containerID="e39f25af97f8feb40ef61f3a8f1111a8e458c3f128fe65b6d33e77e3bfe59b7c"
Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.776640 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e39f25af97f8feb40ef61f3a8f1111a8e458c3f128fe65b6d33e77e3bfe59b7c"} err="failed to get container status \"e39f25af97f8feb40ef61f3a8f1111a8e458c3f128fe65b6d33e77e3bfe59b7c\": rpc error: code = NotFound desc = could not find container \"e39f25af97f8feb40ef61f3a8f1111a8e458c3f128fe65b6d33e77e3bfe59b7c\": container with ID starting with e39f25af97f8feb40ef61f3a8f1111a8e458c3f128fe65b6d33e77e3bfe59b7c not found: ID does not exist"
Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.776664 4784 scope.go:117] "RemoveContainer" containerID="47ebf0103ed665e5df4b174bfa2cfac19a6804c39a76fda9244314dbb7d47bff"
Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.904142 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.921348 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.929495 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 05 12:47:37 crc kubenswrapper[4784]: E1205 12:47:37.929970 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ce21c3e-07a5-4404-827e-367acaba9d66" containerName="watcher-decision-engine"
Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.929998 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ce21c3e-07a5-4404-827e-367acaba9d66" containerName="watcher-decision-engine"
Dec 05 12:47:37 crc kubenswrapper[4784]: E1205 12:47:37.930013 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eeba033c-75f0-4528-b3ee-13bab8f9669c" containerName="glance-log"
Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.930022 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="eeba033c-75f0-4528-b3ee-13bab8f9669c" containerName="glance-log"
Dec 05 12:47:37 crc kubenswrapper[4784]: E1205 12:47:37.930044 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ce21c3e-07a5-4404-827e-367acaba9d66" containerName="watcher-decision-engine"
Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.930052 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ce21c3e-07a5-4404-827e-367acaba9d66" containerName="watcher-decision-engine"
Dec 05 12:47:37 crc kubenswrapper[4784]: E1205 12:47:37.930077 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ce21c3e-07a5-4404-827e-367acaba9d66" containerName="watcher-decision-engine"
Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.930087 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ce21c3e-07a5-4404-827e-367acaba9d66" containerName="watcher-decision-engine"
Dec 05 12:47:37 crc kubenswrapper[4784]: E1205 12:47:37.930109 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eeba033c-75f0-4528-b3ee-13bab8f9669c" containerName="glance-httpd"
Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.930116 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="eeba033c-75f0-4528-b3ee-13bab8f9669c" containerName="glance-httpd"
Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.930384 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ce21c3e-07a5-4404-827e-367acaba9d66" containerName="watcher-decision-engine"
Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.930404 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ce21c3e-07a5-4404-827e-367acaba9d66" containerName="watcher-decision-engine"
Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.930426 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ce21c3e-07a5-4404-827e-367acaba9d66" containerName="watcher-decision-engine"
Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.930440 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="eeba033c-75f0-4528-b3ee-13bab8f9669c" containerName="glance-httpd"
Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.930450 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="eeba033c-75f0-4528-b3ee-13bab8f9669c" containerName="glance-log"
Dec 05 12:47:37 crc kubenswrapper[4784]: E1205 12:47:37.930683 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ce21c3e-07a5-4404-827e-367acaba9d66" containerName="watcher-decision-engine"
Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.930695 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ce21c3e-07a5-4404-827e-367acaba9d66" containerName="watcher-decision-engine"
Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.930922 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ce21c3e-07a5-4404-827e-367acaba9d66" containerName="watcher-decision-engine"
Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.931733 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.937004 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data"
Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.937201 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc"
Dec 05 12:47:37 crc kubenswrapper[4784]: I1205 12:47:37.939883 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.013897 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-decision-engine-0"]
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.030350 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-decision-engine-0"]
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.047159 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-decision-engine-0"]
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.048851 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-decision-engine-0"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.051245 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-decision-engine-config-data"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.056476 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-decision-engine-0"]
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.073303 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f0ebe85-0cf4-4cbf-9b72-1561ca313666-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"4f0ebe85-0cf4-4cbf-9b72-1561ca313666\") " pod="openstack/glance-default-internal-api-0"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.073350 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4f0ebe85-0cf4-4cbf-9b72-1561ca313666-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"4f0ebe85-0cf4-4cbf-9b72-1561ca313666\") " pod="openstack/glance-default-internal-api-0"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.073391 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-df59t\" (UniqueName: \"kubernetes.io/projected/4f0ebe85-0cf4-4cbf-9b72-1561ca313666-kube-api-access-df59t\") pod \"glance-default-internal-api-0\" (UID: \"4f0ebe85-0cf4-4cbf-9b72-1561ca313666\") " pod="openstack/glance-default-internal-api-0"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.073437 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f0ebe85-0cf4-4cbf-9b72-1561ca313666-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"4f0ebe85-0cf4-4cbf-9b72-1561ca313666\") " pod="openstack/glance-default-internal-api-0"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.073459 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f0ebe85-0cf4-4cbf-9b72-1561ca313666-scripts\") pod \"glance-default-internal-api-0\" (UID: \"4f0ebe85-0cf4-4cbf-9b72-1561ca313666\") " pod="openstack/glance-default-internal-api-0"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.073536 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f0ebe85-0cf4-4cbf-9b72-1561ca313666-config-data\") pod \"glance-default-internal-api-0\" (UID: \"4f0ebe85-0cf4-4cbf-9b72-1561ca313666\") " pod="openstack/glance-default-internal-api-0"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.073557 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"4f0ebe85-0cf4-4cbf-9b72-1561ca313666\") " pod="openstack/glance-default-internal-api-0"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.073576 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f0ebe85-0cf4-4cbf-9b72-1561ca313666-logs\") pod \"glance-default-internal-api-0\" (UID: \"4f0ebe85-0cf4-4cbf-9b72-1561ca313666\") " pod="openstack/glance-default-internal-api-0"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.174728 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f0ebe85-0cf4-4cbf-9b72-1561ca313666-config-data\") pod \"glance-default-internal-api-0\" (UID: \"4f0ebe85-0cf4-4cbf-9b72-1561ca313666\") " pod="openstack/glance-default-internal-api-0"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.174784 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"4f0ebe85-0cf4-4cbf-9b72-1561ca313666\") " pod="openstack/glance-default-internal-api-0"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.174813 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f0ebe85-0cf4-4cbf-9b72-1561ca313666-logs\") pod \"glance-default-internal-api-0\" (UID: \"4f0ebe85-0cf4-4cbf-9b72-1561ca313666\") " pod="openstack/glance-default-internal-api-0"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.174841 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7dd4bdfd-f163-412e-81a1-ee0d8a2c6aa1-logs\") pod \"watcher-decision-engine-0\" (UID: \"7dd4bdfd-f163-412e-81a1-ee0d8a2c6aa1\") " pod="openstack/watcher-decision-engine-0"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.174924 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7dd4bdfd-f163-412e-81a1-ee0d8a2c6aa1-config-data\") pod \"watcher-decision-engine-0\" (UID: \"7dd4bdfd-f163-412e-81a1-ee0d8a2c6aa1\") " pod="openstack/watcher-decision-engine-0"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.174959 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f0ebe85-0cf4-4cbf-9b72-1561ca313666-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"4f0ebe85-0cf4-4cbf-9b72-1561ca313666\") " pod="openstack/glance-default-internal-api-0"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.174986 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4f0ebe85-0cf4-4cbf-9b72-1561ca313666-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"4f0ebe85-0cf4-4cbf-9b72-1561ca313666\") " pod="openstack/glance-default-internal-api-0"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.175108 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-df59t\" (UniqueName: \"kubernetes.io/projected/4f0ebe85-0cf4-4cbf-9b72-1561ca313666-kube-api-access-df59t\") pod \"glance-default-internal-api-0\" (UID: \"4f0ebe85-0cf4-4cbf-9b72-1561ca313666\") " pod="openstack/glance-default-internal-api-0"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.175129 4784 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"4f0ebe85-0cf4-4cbf-9b72-1561ca313666\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/glance-default-internal-api-0"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.175400 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f0ebe85-0cf4-4cbf-9b72-1561ca313666-logs\") pod \"glance-default-internal-api-0\" (UID: \"4f0ebe85-0cf4-4cbf-9b72-1561ca313666\") " pod="openstack/glance-default-internal-api-0"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.175513 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4f0ebe85-0cf4-4cbf-9b72-1561ca313666-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"4f0ebe85-0cf4-4cbf-9b72-1561ca313666\") " pod="openstack/glance-default-internal-api-0"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.175520 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f0ebe85-0cf4-4cbf-9b72-1561ca313666-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"4f0ebe85-0cf4-4cbf-9b72-1561ca313666\") " pod="openstack/glance-default-internal-api-0"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.175589 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f0ebe85-0cf4-4cbf-9b72-1561ca313666-scripts\") pod \"glance-default-internal-api-0\" (UID: \"4f0ebe85-0cf4-4cbf-9b72-1561ca313666\") " pod="openstack/glance-default-internal-api-0"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.175653 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/7dd4bdfd-f163-412e-81a1-ee0d8a2c6aa1-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"7dd4bdfd-f163-412e-81a1-ee0d8a2c6aa1\") " pod="openstack/watcher-decision-engine-0"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.175698 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4j8qp\" (UniqueName: \"kubernetes.io/projected/7dd4bdfd-f163-412e-81a1-ee0d8a2c6aa1-kube-api-access-4j8qp\") pod \"watcher-decision-engine-0\" (UID: \"7dd4bdfd-f163-412e-81a1-ee0d8a2c6aa1\") " pod="openstack/watcher-decision-engine-0"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.175817 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7dd4bdfd-f163-412e-81a1-ee0d8a2c6aa1-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"7dd4bdfd-f163-412e-81a1-ee0d8a2c6aa1\") " pod="openstack/watcher-decision-engine-0"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.179952 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f0ebe85-0cf4-4cbf-9b72-1561ca313666-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"4f0ebe85-0cf4-4cbf-9b72-1561ca313666\") " pod="openstack/glance-default-internal-api-0"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.179949 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f0ebe85-0cf4-4cbf-9b72-1561ca313666-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"4f0ebe85-0cf4-4cbf-9b72-1561ca313666\") " pod="openstack/glance-default-internal-api-0"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.185370 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f0ebe85-0cf4-4cbf-9b72-1561ca313666-config-data\") pod \"glance-default-internal-api-0\" (UID: \"4f0ebe85-0cf4-4cbf-9b72-1561ca313666\") " pod="openstack/glance-default-internal-api-0"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.191167 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f0ebe85-0cf4-4cbf-9b72-1561ca313666-scripts\") pod \"glance-default-internal-api-0\" (UID: \"4f0ebe85-0cf4-4cbf-9b72-1561ca313666\") " pod="openstack/glance-default-internal-api-0"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.199055 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-df59t\" (UniqueName: \"kubernetes.io/projected/4f0ebe85-0cf4-4cbf-9b72-1561ca313666-kube-api-access-df59t\") pod \"glance-default-internal-api-0\" (UID: \"4f0ebe85-0cf4-4cbf-9b72-1561ca313666\") " pod="openstack/glance-default-internal-api-0"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.209874 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"4f0ebe85-0cf4-4cbf-9b72-1561ca313666\") " pod="openstack/glance-default-internal-api-0"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.245937 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.277293 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/7dd4bdfd-f163-412e-81a1-ee0d8a2c6aa1-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"7dd4bdfd-f163-412e-81a1-ee0d8a2c6aa1\") " pod="openstack/watcher-decision-engine-0"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.277417 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4j8qp\" (UniqueName: \"kubernetes.io/projected/7dd4bdfd-f163-412e-81a1-ee0d8a2c6aa1-kube-api-access-4j8qp\") pod \"watcher-decision-engine-0\" (UID: \"7dd4bdfd-f163-412e-81a1-ee0d8a2c6aa1\") " pod="openstack/watcher-decision-engine-0"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.277495 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7dd4bdfd-f163-412e-81a1-ee0d8a2c6aa1-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"7dd4bdfd-f163-412e-81a1-ee0d8a2c6aa1\") " pod="openstack/watcher-decision-engine-0"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.277724 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7dd4bdfd-f163-412e-81a1-ee0d8a2c6aa1-logs\") pod \"watcher-decision-engine-0\" (UID: \"7dd4bdfd-f163-412e-81a1-ee0d8a2c6aa1\") " pod="openstack/watcher-decision-engine-0"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.277833 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7dd4bdfd-f163-412e-81a1-ee0d8a2c6aa1-config-data\") pod \"watcher-decision-engine-0\" (UID: \"7dd4bdfd-f163-412e-81a1-ee0d8a2c6aa1\") " pod="openstack/watcher-decision-engine-0"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.278558 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7dd4bdfd-f163-412e-81a1-ee0d8a2c6aa1-logs\") pod \"watcher-decision-engine-0\" (UID: \"7dd4bdfd-f163-412e-81a1-ee0d8a2c6aa1\") " pod="openstack/watcher-decision-engine-0"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.282825 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7dd4bdfd-f163-412e-81a1-ee0d8a2c6aa1-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"7dd4bdfd-f163-412e-81a1-ee0d8a2c6aa1\") " pod="openstack/watcher-decision-engine-0"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.290168 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7dd4bdfd-f163-412e-81a1-ee0d8a2c6aa1-config-data\") pod \"watcher-decision-engine-0\" (UID: \"7dd4bdfd-f163-412e-81a1-ee0d8a2c6aa1\") " pod="openstack/watcher-decision-engine-0"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.290562 4784 scope.go:117] "RemoveContainer" containerID="570eb15bffb9b17b6eabb60d2c9dc61d93e9ae20ec1868516f6b6eebb2504f9c"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.293319 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/7dd4bdfd-f163-412e-81a1-ee0d8a2c6aa1-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"7dd4bdfd-f163-412e-81a1-ee0d8a2c6aa1\") " pod="openstack/watcher-decision-engine-0"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.301639 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4j8qp\" (UniqueName: \"kubernetes.io/projected/7dd4bdfd-f163-412e-81a1-ee0d8a2c6aa1-kube-api-access-4j8qp\") pod \"watcher-decision-engine-0\" (UID: \"7dd4bdfd-f163-412e-81a1-ee0d8a2c6aa1\") " pod="openstack/watcher-decision-engine-0"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.366642 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-decision-engine-0"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.459412 4784 scope.go:117] "RemoveContainer" containerID="47ebf0103ed665e5df4b174bfa2cfac19a6804c39a76fda9244314dbb7d47bff"
Dec 05 12:47:38 crc kubenswrapper[4784]: E1205 12:47:38.461354 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"47ebf0103ed665e5df4b174bfa2cfac19a6804c39a76fda9244314dbb7d47bff\": container with ID starting with 47ebf0103ed665e5df4b174bfa2cfac19a6804c39a76fda9244314dbb7d47bff not found: ID does not exist" containerID="47ebf0103ed665e5df4b174bfa2cfac19a6804c39a76fda9244314dbb7d47bff"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.461396 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"47ebf0103ed665e5df4b174bfa2cfac19a6804c39a76fda9244314dbb7d47bff"} err="failed to get container status \"47ebf0103ed665e5df4b174bfa2cfac19a6804c39a76fda9244314dbb7d47bff\": rpc error: code = NotFound desc = could not find container \"47ebf0103ed665e5df4b174bfa2cfac19a6804c39a76fda9244314dbb7d47bff\": container with ID starting with 47ebf0103ed665e5df4b174bfa2cfac19a6804c39a76fda9244314dbb7d47bff not found: ID does not exist"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.461424 4784 scope.go:117] "RemoveContainer" containerID="570eb15bffb9b17b6eabb60d2c9dc61d93e9ae20ec1868516f6b6eebb2504f9c"
Dec 05 12:47:38 crc kubenswrapper[4784]: E1205 12:47:38.466927 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"570eb15bffb9b17b6eabb60d2c9dc61d93e9ae20ec1868516f6b6eebb2504f9c\": container with ID starting with 570eb15bffb9b17b6eabb60d2c9dc61d93e9ae20ec1868516f6b6eebb2504f9c not found: ID does not exist" containerID="570eb15bffb9b17b6eabb60d2c9dc61d93e9ae20ec1868516f6b6eebb2504f9c"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.466981 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"570eb15bffb9b17b6eabb60d2c9dc61d93e9ae20ec1868516f6b6eebb2504f9c"} err="failed to get container status \"570eb15bffb9b17b6eabb60d2c9dc61d93e9ae20ec1868516f6b6eebb2504f9c\": rpc error: code = NotFound desc = could not find container \"570eb15bffb9b17b6eabb60d2c9dc61d93e9ae20ec1868516f6b6eebb2504f9c\": container with ID starting with 570eb15bffb9b17b6eabb60d2c9dc61d93e9ae20ec1868516f6b6eebb2504f9c not found: ID does not exist"
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.748914 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"60863f45-1c7b-4e86-8782-aece4b178edb","Type":"ContainerStarted","Data":"07bd5a097eefc1366b8d293ee624ffd3f92517b57fa25ad9b0d03fcf792adad8"}
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.936336 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 05 12:47:38 crc kubenswrapper[4784]: I1205 12:47:38.978517 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-decision-engine-0"]
Dec 05 12:47:38 crc kubenswrapper[4784]: W1205 12:47:38.988089 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7dd4bdfd_f163_412e_81a1_ee0d8a2c6aa1.slice/crio-0c13aba64e74b7aa89f963fbb58d4f98c7a6e6eb52a35185dc58abfa1d2c25b2 WatchSource:0}: Error finding container 0c13aba64e74b7aa89f963fbb58d4f98c7a6e6eb52a35185dc58abfa1d2c25b2: Status 404 returned error can't find the container with id 0c13aba64e74b7aa89f963fbb58d4f98c7a6e6eb52a35185dc58abfa1d2c25b2
Dec 05 12:47:39 crc kubenswrapper[4784]: I1205 12:47:39.017125 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9ce21c3e-07a5-4404-827e-367acaba9d66" path="/var/lib/kubelet/pods/9ce21c3e-07a5-4404-827e-367acaba9d66/volumes"
Dec 05 12:47:39 crc kubenswrapper[4784]: I1205 12:47:39.018609 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eeba033c-75f0-4528-b3ee-13bab8f9669c" path="/var/lib/kubelet/pods/eeba033c-75f0-4528-b3ee-13bab8f9669c/volumes"
Dec 05 12:47:39 crc kubenswrapper[4784]: I1205 12:47:39.782184 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4f0ebe85-0cf4-4cbf-9b72-1561ca313666","Type":"ContainerStarted","Data":"1b93bb9a6beac999bb8e517cac4252e7016ecc104f6210d64d34ddc2479897ba"}
Dec 05 12:47:39 crc kubenswrapper[4784]: I1205 12:47:39.782793 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4f0ebe85-0cf4-4cbf-9b72-1561ca313666","Type":"ContainerStarted","Data":"fb32a681e61fdb1034d8a5c4fe124a5572bccd0d8bb666b2332db89f7dbb61d1"}
Dec 05 12:47:39 crc kubenswrapper[4784]: I1205 12:47:39.784161 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"60863f45-1c7b-4e86-8782-aece4b178edb","Type":"ContainerStarted","Data":"ee894edb4d5101c50527715acce0d791313393cbd320f180803d19b5e8523a65"}
Dec 05 12:47:39 crc kubenswrapper[4784]: I1205 12:47:39.822017 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"7dd4bdfd-f163-412e-81a1-ee0d8a2c6aa1","Type":"ContainerStarted","Data":"050fd2d3e7ee85ddb4eaed9341e47df8ed94691623052cd792d22bed422c3a5e"}
Dec 05 12:47:39 crc kubenswrapper[4784]: I1205 12:47:39.822073 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"7dd4bdfd-f163-412e-81a1-ee0d8a2c6aa1","Type":"ContainerStarted","Data":"0c13aba64e74b7aa89f963fbb58d4f98c7a6e6eb52a35185dc58abfa1d2c25b2"}
Dec 05 12:47:39 crc kubenswrapper[4784]: I1205 12:47:39.827301 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.827281493 podStartE2EDuration="4.827281493s" podCreationTimestamp="2025-12-05 12:47:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:47:39.820686987 +0000 UTC m=+1339.240753802" watchObservedRunningTime="2025-12-05 12:47:39.827281493 +0000 UTC m=+1339.247348308"
Dec 05 12:47:39 crc kubenswrapper[4784]: I1205 12:47:39.853000 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-decision-engine-0" podStartSLOduration=2.852985005 podStartE2EDuration="2.852985005s" podCreationTimestamp="2025-12-05 12:47:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:47:39.851423476 +0000 UTC m=+1339.271490301" watchObservedRunningTime="2025-12-05 12:47:39.852985005 +0000 UTC m=+1339.273051820"
Dec 05 12:47:40 crc kubenswrapper[4784]: I1205 12:47:40.834960 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4f0ebe85-0cf4-4cbf-9b72-1561ca313666","Type":"ContainerStarted","Data":"70fb34e68be1e7e83124cb48e041d7d9962e19761dda2632544fba7f8b0dd043"}
Dec 05 12:47:40 crc kubenswrapper[4784]: I1205 12:47:40.880929 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.880907604 podStartE2EDuration="3.880907604s" podCreationTimestamp="2025-12-05 12:47:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:47:40.872903994 +0000 UTC m=+1340.292970819" watchObservedRunningTime="2025-12-05 12:47:40.880907604 +0000 UTC m=+1340.300974419"
Dec 05 12:47:46 crc kubenswrapper[4784]: I1205 12:47:46.289392 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Dec 05 12:47:46 crc kubenswrapper[4784]: I1205 12:47:46.289951 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Dec 05 12:47:46 crc kubenswrapper[4784]: I1205 12:47:46.324950 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Dec 05 12:47:46 crc kubenswrapper[4784]: I1205 12:47:46.337570 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Dec 05 12:47:46 crc kubenswrapper[4784]: I1205 12:47:46.896734 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Dec 05 12:47:46 crc kubenswrapper[4784]: I1205 12:47:46.897003 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Dec 05 12:47:48 crc kubenswrapper[4784]: I1205 12:47:48.246616 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Dec 05 12:47:48 crc kubenswrapper[4784]: I1205 12:47:48.246657 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Dec 05 12:47:48 crc kubenswrapper[4784]: I1205 12:47:48.286224 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Dec 05 12:47:48 crc kubenswrapper[4784]: I1205 12:47:48.294489 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Dec 05 12:47:48 crc kubenswrapper[4784]: I1205 12:47:48.367588 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0"
Dec 05 12:47:48 crc kubenswrapper[4784]: I1205 12:47:48.393962 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-decision-engine-0"
Dec 05 12:47:48 crc kubenswrapper[4784]: I1205 12:47:48.748679 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0"
Dec 05 12:47:48 crc kubenswrapper[4784]: I1205 12:47:48.749853 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0"
Dec 05 12:47:48 crc kubenswrapper[4784]: I1205 12:47:48.915535 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-decision-engine-0"
Dec 05 12:47:48 crc kubenswrapper[4784]: I1205 12:47:48.915577 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Dec 05 12:47:48 crc kubenswrapper[4784]: I1205 12:47:48.915592 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Dec 05 12:47:48 crc kubenswrapper[4784]: I1205 12:47:48.947890 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-decision-engine-0"
Dec 05 12:47:49 crc kubenswrapper[4784]: I1205 12:47:49.925112 4784 generic.go:334] "Generic (PLEG): container finished" podID="26e5617b-90b9-451b-a0a1-6f43d885ab38" containerID="602aae4c1761197ed50d137a50507c7c6bfbc79715901569be87bb7d536fdc47" exitCode=0
Dec 05 12:47:49 crc kubenswrapper[4784]: I1205 12:47:49.925289 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-glpkn" event={"ID":"26e5617b-90b9-451b-a0a1-6f43d885ab38","Type":"ContainerDied","Data":"602aae4c1761197ed50d137a50507c7c6bfbc79715901569be87bb7d536fdc47"}
Dec 05 12:47:50 crc kubenswrapper[4784]: I1205 12:47:50.888999 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Dec 05 12:47:50 crc kubenswrapper[4784]: I1205 12:47:50.934550 4784 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 05 12:47:51 crc kubenswrapper[4784]: I1205 12:47:51.068379 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Dec 05 12:47:51 crc kubenswrapper[4784]: I1205 12:47:51.386571 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-glpkn"
Dec 05 12:47:51 crc kubenswrapper[4784]: I1205 12:47:51.552338 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26e5617b-90b9-451b-a0a1-6f43d885ab38-combined-ca-bundle\") pod \"26e5617b-90b9-451b-a0a1-6f43d885ab38\" (UID: \"26e5617b-90b9-451b-a0a1-6f43d885ab38\") "
Dec 05 12:47:51 crc kubenswrapper[4784]: I1205 12:47:51.552623 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26e5617b-90b9-451b-a0a1-6f43d885ab38-config-data\") pod \"26e5617b-90b9-451b-a0a1-6f43d885ab38\" (UID: \"26e5617b-90b9-451b-a0a1-6f43d885ab38\") "
Dec 05 12:47:51 crc kubenswrapper[4784]: I1205 12:47:51.552687 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/26e5617b-90b9-451b-a0a1-6f43d885ab38-scripts\") pod \"26e5617b-90b9-451b-a0a1-6f43d885ab38\" (UID: \"26e5617b-90b9-451b-a0a1-6f43d885ab38\") "
Dec 05 12:47:51 crc kubenswrapper[4784]: I1205 12:47:51.552904 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqw4n\" (UniqueName: \"kubernetes.io/projected/26e5617b-90b9-451b-a0a1-6f43d885ab38-kube-api-access-fqw4n\") pod \"26e5617b-90b9-451b-a0a1-6f43d885ab38\" (UID: \"26e5617b-90b9-451b-a0a1-6f43d885ab38\") "
Dec 05 12:47:51 crc kubenswrapper[4784]: I1205 12:47:51.560342 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26e5617b-90b9-451b-a0a1-6f43d885ab38-scripts" (OuterVolumeSpecName: "scripts") pod "26e5617b-90b9-451b-a0a1-6f43d885ab38" (UID: "26e5617b-90b9-451b-a0a1-6f43d885ab38"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:47:51 crc kubenswrapper[4784]: I1205 12:47:51.560396 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/26e5617b-90b9-451b-a0a1-6f43d885ab38-kube-api-access-fqw4n" (OuterVolumeSpecName: "kube-api-access-fqw4n") pod "26e5617b-90b9-451b-a0a1-6f43d885ab38" (UID: "26e5617b-90b9-451b-a0a1-6f43d885ab38"). InnerVolumeSpecName "kube-api-access-fqw4n". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:47:51 crc kubenswrapper[4784]: I1205 12:47:51.583450 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26e5617b-90b9-451b-a0a1-6f43d885ab38-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "26e5617b-90b9-451b-a0a1-6f43d885ab38" (UID: "26e5617b-90b9-451b-a0a1-6f43d885ab38"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:47:51 crc kubenswrapper[4784]: I1205 12:47:51.595374 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26e5617b-90b9-451b-a0a1-6f43d885ab38-config-data" (OuterVolumeSpecName: "config-data") pod "26e5617b-90b9-451b-a0a1-6f43d885ab38" (UID: "26e5617b-90b9-451b-a0a1-6f43d885ab38"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:47:51 crc kubenswrapper[4784]: I1205 12:47:51.655695 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqw4n\" (UniqueName: \"kubernetes.io/projected/26e5617b-90b9-451b-a0a1-6f43d885ab38-kube-api-access-fqw4n\") on node \"crc\" DevicePath \"\""
Dec 05 12:47:51 crc kubenswrapper[4784]: I1205 12:47:51.655957 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26e5617b-90b9-451b-a0a1-6f43d885ab38-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 12:47:51 crc kubenswrapper[4784]: I1205 12:47:51.656039 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26e5617b-90b9-451b-a0a1-6f43d885ab38-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 12:47:51 crc kubenswrapper[4784]: I1205 12:47:51.656157 4784 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/26e5617b-90b9-451b-a0a1-6f43d885ab38-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 12:47:51 crc kubenswrapper[4784]: I1205 12:47:51.951629 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-glpkn" event={"ID":"26e5617b-90b9-451b-a0a1-6f43d885ab38","Type":"ContainerDied","Data":"a7e96d4df90740b26c0a940952becbbf94eaf1c44e090ba1278db78035a9b201"}
Dec 05 12:47:51 crc kubenswrapper[4784]: I1205 12:47:51.951696 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a7e96d4df90740b26c0a940952becbbf94eaf1c44e090ba1278db78035a9b201"
Dec 05 12:47:51 crc kubenswrapper[4784]: I1205 12:47:51.951654 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-glpkn"
Dec 05 12:47:52 crc kubenswrapper[4784]: I1205 12:47:52.246680 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"]
Dec 05 12:47:52 crc kubenswrapper[4784]: E1205 12:47:52.247291 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26e5617b-90b9-451b-a0a1-6f43d885ab38" containerName="nova-cell0-conductor-db-sync"
Dec 05 12:47:52 crc kubenswrapper[4784]: I1205 12:47:52.247321 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="26e5617b-90b9-451b-a0a1-6f43d885ab38" containerName="nova-cell0-conductor-db-sync"
Dec 05 12:47:52 crc kubenswrapper[4784]: I1205 12:47:52.247693 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="26e5617b-90b9-451b-a0a1-6f43d885ab38" containerName="nova-cell0-conductor-db-sync"
Dec 05 12:47:52 crc kubenswrapper[4784]: I1205 12:47:52.250802 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Dec 05 12:47:52 crc kubenswrapper[4784]: I1205 12:47:52.253628 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data"
Dec 05 12:47:52 crc kubenswrapper[4784]: I1205 12:47:52.253677 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-nx8fv"
Dec 05 12:47:52 crc kubenswrapper[4784]: I1205 12:47:52.261252 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Dec 05 12:47:52 crc kubenswrapper[4784]: I1205 12:47:52.368970 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eceb1ef2-197a-47dd-9d57-94990b64208e-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"eceb1ef2-197a-47dd-9d57-94990b64208e\") " pod="openstack/nova-cell0-conductor-0"
Dec 05 12:47:52 crc kubenswrapper[4784]: I1205 12:47:52.369278 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-67wjr\" (UniqueName: \"kubernetes.io/projected/eceb1ef2-197a-47dd-9d57-94990b64208e-kube-api-access-67wjr\") pod \"nova-cell0-conductor-0\" (UID: \"eceb1ef2-197a-47dd-9d57-94990b64208e\") " pod="openstack/nova-cell0-conductor-0"
Dec 05 12:47:52 crc kubenswrapper[4784]: I1205 12:47:52.369376 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eceb1ef2-197a-47dd-9d57-94990b64208e-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"eceb1ef2-197a-47dd-9d57-94990b64208e\") " pod="openstack/nova-cell0-conductor-0"
Dec 05 12:47:52 crc kubenswrapper[4784]: I1205 12:47:52.471337 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eceb1ef2-197a-47dd-9d57-94990b64208e-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"eceb1ef2-197a-47dd-9d57-94990b64208e\") " pod="openstack/nova-cell0-conductor-0"
Dec 05 12:47:52 crc kubenswrapper[4784]: I1205 12:47:52.471549 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-67wjr\" (UniqueName: \"kubernetes.io/projected/eceb1ef2-197a-47dd-9d57-94990b64208e-kube-api-access-67wjr\") pod \"nova-cell0-conductor-0\" (UID: \"eceb1ef2-197a-47dd-9d57-94990b64208e\") " pod="openstack/nova-cell0-conductor-0"
Dec 05 12:47:52 crc kubenswrapper[4784]: I1205 12:47:52.471596 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eceb1ef2-197a-47dd-9d57-94990b64208e-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"eceb1ef2-197a-47dd-9d57-94990b64208e\") " pod="openstack/nova-cell0-conductor-0"
Dec 05 12:47:52 crc kubenswrapper[4784]: I1205 12:47:52.478404 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eceb1ef2-197a-47dd-9d57-94990b64208e-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"eceb1ef2-197a-47dd-9d57-94990b64208e\") " pod="openstack/nova-cell0-conductor-0"
Dec 05 12:47:52 crc kubenswrapper[4784]: I1205 12:47:52.478837 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eceb1ef2-197a-47dd-9d57-94990b64208e-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"eceb1ef2-197a-47dd-9d57-94990b64208e\") " pod="openstack/nova-cell0-conductor-0"
Dec 05 12:47:52 crc kubenswrapper[4784]: I1205 12:47:52.492202 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-67wjr\" (UniqueName: \"kubernetes.io/projected/eceb1ef2-197a-47dd-9d57-94990b64208e-kube-api-access-67wjr\") pod \"nova-cell0-conductor-0\" (UID: \"eceb1ef2-197a-47dd-9d57-94990b64208e\") " pod="openstack/nova-cell0-conductor-0"
Dec 05 12:47:52 crc kubenswrapper[4784]: I1205 12:47:52.574789 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Dec 05 12:47:52 crc kubenswrapper[4784]: I1205 12:47:52.791322 4784 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="a2f1352c-4d83-4d92-ab5e-f81457da7f57" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503"
Dec 05 12:47:53 crc kubenswrapper[4784]: I1205 12:47:53.160915 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Dec 05 12:47:53 crc kubenswrapper[4784]: W1205 12:47:53.183486 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeceb1ef2_197a_47dd_9d57_94990b64208e.slice/crio-96ea81a0dd72f3ab679b0ccba143d9a81802c27dec75785152a582a12695645d WatchSource:0}: Error finding container 96ea81a0dd72f3ab679b0ccba143d9a81802c27dec75785152a582a12695645d: Status 404 returned error can't find the container with id 96ea81a0dd72f3ab679b0ccba143d9a81802c27dec75785152a582a12695645d
Dec 05 12:47:53 crc kubenswrapper[4784]: I1205 12:47:53.980342 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"eceb1ef2-197a-47dd-9d57-94990b64208e","Type":"ContainerStarted","Data":"45c3799d11cb75d1cd3c3ae691070108c05eb1162a1748ac38e0fc7c3a1d4bfd"}
Dec 05 12:47:53 crc kubenswrapper[4784]: I1205 12:47:53.980980 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0"
Dec 05 12:47:53 crc kubenswrapper[4784]: I1205 12:47:53.980997 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"eceb1ef2-197a-47dd-9d57-94990b64208e","Type":"ContainerStarted","Data":"96ea81a0dd72f3ab679b0ccba143d9a81802c27dec75785152a582a12695645d"}
Dec 05 12:47:54 crc kubenswrapper[4784]: I1205 12:47:54.000244 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.000222224 podStartE2EDuration="2.000222224s" podCreationTimestamp="2025-12-05 12:47:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:47:53.996313753 +0000 UTC m=+1353.416380568" watchObservedRunningTime="2025-12-05 12:47:54.000222224 +0000 UTC m=+1353.420289039"
Dec 05 12:47:57 crc kubenswrapper[4784]: I1205 12:47:57.012283 4784 generic.go:334] "Generic (PLEG): container finished" podID="a2f1352c-4d83-4d92-ab5e-f81457da7f57" containerID="0f812b6f667919326683ec776a206cc3f519d20668b9a36474214af552caa6b8" exitCode=137
Dec 05 12:47:57 crc kubenswrapper[4784]: I1205 12:47:57.012609 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a2f1352c-4d83-4d92-ab5e-f81457da7f57","Type":"ContainerDied","Data":"0f812b6f667919326683ec776a206cc3f519d20668b9a36474214af552caa6b8"}
Dec 05 12:47:57 crc kubenswrapper[4784]: I1205 12:47:57.641107 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 12:47:57 crc kubenswrapper[4784]: I1205 12:47:57.740587 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Dec 05 12:47:57 crc kubenswrapper[4784]: I1205 12:47:57.740766 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="eceb1ef2-197a-47dd-9d57-94990b64208e" containerName="nova-cell0-conductor-conductor" containerID="cri-o://45c3799d11cb75d1cd3c3ae691070108c05eb1162a1748ac38e0fc7c3a1d4bfd" gracePeriod=30
Dec 05 12:47:57 crc kubenswrapper[4784]: I1205 12:47:57.805693 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2f1352c-4d83-4d92-ab5e-f81457da7f57-combined-ca-bundle\") pod \"a2f1352c-4d83-4d92-ab5e-f81457da7f57\" (UID: \"a2f1352c-4d83-4d92-ab5e-f81457da7f57\") "
Dec 05 12:47:57 crc kubenswrapper[4784]: I1205 12:47:57.805769 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a2f1352c-4d83-4d92-ab5e-f81457da7f57-sg-core-conf-yaml\") pod \"a2f1352c-4d83-4d92-ab5e-f81457da7f57\" (UID: \"a2f1352c-4d83-4d92-ab5e-f81457da7f57\") "
Dec 05 12:47:57 crc kubenswrapper[4784]: I1205 12:47:57.805812 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb56x\" (UniqueName: \"kubernetes.io/projected/a2f1352c-4d83-4d92-ab5e-f81457da7f57-kube-api-access-sb56x\") pod \"a2f1352c-4d83-4d92-ab5e-f81457da7f57\" (UID: \"a2f1352c-4d83-4d92-ab5e-f81457da7f57\") "
Dec 05 12:47:57 crc kubenswrapper[4784]: I1205 12:47:57.805841 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a2f1352c-4d83-4d92-ab5e-f81457da7f57-log-httpd\") pod \"a2f1352c-4d83-4d92-ab5e-f81457da7f57\" (UID: \"a2f1352c-4d83-4d92-ab5e-f81457da7f57\") "
Dec 05 12:47:57 crc kubenswrapper[4784]: I1205 12:47:57.805902 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a2f1352c-4d83-4d92-ab5e-f81457da7f57-scripts\") pod \"a2f1352c-4d83-4d92-ab5e-f81457da7f57\" (UID: \"a2f1352c-4d83-4d92-ab5e-f81457da7f57\") "
Dec 05 12:47:57 crc kubenswrapper[4784]: I1205 12:47:57.805920 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2f1352c-4d83-4d92-ab5e-f81457da7f57-config-data\") pod \"a2f1352c-4d83-4d92-ab5e-f81457da7f57\" (UID: \"a2f1352c-4d83-4d92-ab5e-f81457da7f57\") "
Dec 05 12:47:57 crc kubenswrapper[4784]: I1205 12:47:57.805980 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a2f1352c-4d83-4d92-ab5e-f81457da7f57-run-httpd\") pod \"a2f1352c-4d83-4d92-ab5e-f81457da7f57\" (UID: \"a2f1352c-4d83-4d92-ab5e-f81457da7f57\") "
Dec 05 12:47:57 crc kubenswrapper[4784]: I1205 12:47:57.806840 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a2f1352c-4d83-4d92-ab5e-f81457da7f57-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a2f1352c-4d83-4d92-ab5e-f81457da7f57" (UID: "a2f1352c-4d83-4d92-ab5e-f81457da7f57"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 12:47:57 crc kubenswrapper[4784]: I1205 12:47:57.807611 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a2f1352c-4d83-4d92-ab5e-f81457da7f57-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a2f1352c-4d83-4d92-ab5e-f81457da7f57" (UID: "a2f1352c-4d83-4d92-ab5e-f81457da7f57"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 12:47:57 crc kubenswrapper[4784]: I1205 12:47:57.811538 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2f1352c-4d83-4d92-ab5e-f81457da7f57-scripts" (OuterVolumeSpecName: "scripts") pod "a2f1352c-4d83-4d92-ab5e-f81457da7f57" (UID: "a2f1352c-4d83-4d92-ab5e-f81457da7f57"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:47:57 crc kubenswrapper[4784]: I1205 12:47:57.813917 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2f1352c-4d83-4d92-ab5e-f81457da7f57-kube-api-access-sb56x" (OuterVolumeSpecName: "kube-api-access-sb56x") pod "a2f1352c-4d83-4d92-ab5e-f81457da7f57" (UID: "a2f1352c-4d83-4d92-ab5e-f81457da7f57"). InnerVolumeSpecName "kube-api-access-sb56x". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:47:57 crc kubenswrapper[4784]: I1205 12:47:57.861276 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2f1352c-4d83-4d92-ab5e-f81457da7f57-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a2f1352c-4d83-4d92-ab5e-f81457da7f57" (UID: "a2f1352c-4d83-4d92-ab5e-f81457da7f57"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:47:57 crc kubenswrapper[4784]: I1205 12:47:57.908514 4784 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a2f1352c-4d83-4d92-ab5e-f81457da7f57-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Dec 05 12:47:57 crc kubenswrapper[4784]: I1205 12:47:57.908833 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb56x\" (UniqueName: \"kubernetes.io/projected/a2f1352c-4d83-4d92-ab5e-f81457da7f57-kube-api-access-sb56x\") on node \"crc\" DevicePath \"\""
Dec 05 12:47:57 crc kubenswrapper[4784]: I1205 12:47:57.908850 4784 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a2f1352c-4d83-4d92-ab5e-f81457da7f57-log-httpd\") on node \"crc\" DevicePath \"\""
Dec 05 12:47:57 crc kubenswrapper[4784]: I1205 12:47:57.908860 4784 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a2f1352c-4d83-4d92-ab5e-f81457da7f57-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 12:47:57 crc kubenswrapper[4784]: I1205 12:47:57.908868 4784 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a2f1352c-4d83-4d92-ab5e-f81457da7f57-run-httpd\") on node \"crc\" DevicePath \"\""
Dec 05 12:47:57 crc kubenswrapper[4784]: I1205 12:47:57.932305 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2f1352c-4d83-4d92-ab5e-f81457da7f57-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a2f1352c-4d83-4d92-ab5e-f81457da7f57" (UID: "a2f1352c-4d83-4d92-ab5e-f81457da7f57"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:47:57 crc kubenswrapper[4784]: I1205 12:47:57.972776 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2f1352c-4d83-4d92-ab5e-f81457da7f57-config-data" (OuterVolumeSpecName: "config-data") pod "a2f1352c-4d83-4d92-ab5e-f81457da7f57" (UID: "a2f1352c-4d83-4d92-ab5e-f81457da7f57"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:47:58 crc kubenswrapper[4784]: I1205 12:47:58.010649 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2f1352c-4d83-4d92-ab5e-f81457da7f57-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 12:47:58 crc kubenswrapper[4784]: I1205 12:47:58.010695 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2f1352c-4d83-4d92-ab5e-f81457da7f57-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 12:47:58 crc kubenswrapper[4784]: I1205 12:47:58.025553 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a2f1352c-4d83-4d92-ab5e-f81457da7f57","Type":"ContainerDied","Data":"8a9db368e5abe464d5079bb826e5859efa3c6245515805901602614bf33e2be5"}
Dec 05 12:47:58 crc kubenswrapper[4784]: I1205 12:47:58.025613 4784 scope.go:117] "RemoveContainer" containerID="0f812b6f667919326683ec776a206cc3f519d20668b9a36474214af552caa6b8"
Dec 05 12:47:58 crc kubenswrapper[4784]: I1205 12:47:58.025787 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 12:47:58 crc kubenswrapper[4784]: I1205 12:47:58.064037 4784 scope.go:117] "RemoveContainer" containerID="05a25b5c3c732338031f6b50d7e775ac3dc3ec7d16db9aa1cbcc2e9920c895b6"
Dec 05 12:47:58 crc kubenswrapper[4784]: I1205 12:47:58.091665 4784 scope.go:117] "RemoveContainer" containerID="0263c322a90da2a28ca135e91605fbba5b50b37ebc328552c6fdf6d25d1a1e8c"
Dec 05 12:47:58 crc kubenswrapper[4784]: I1205 12:47:58.120182 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 12:47:58 crc kubenswrapper[4784]: I1205 12:47:58.129229 4784 scope.go:117] "RemoveContainer" containerID="77ed8b68f4b7dfcf165a9c047c746998d25332523c092e2e1e8757616f5878a2"
Dec 05 12:47:58 crc kubenswrapper[4784]: I1205 12:47:58.135290 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 12:47:58 crc kubenswrapper[4784]: I1205 12:47:58.148066 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Dec 05 12:47:58 crc kubenswrapper[4784]: E1205 12:47:58.148587 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2f1352c-4d83-4d92-ab5e-f81457da7f57" containerName="ceilometer-central-agent"
Dec 05 12:47:58 crc kubenswrapper[4784]: I1205 12:47:58.148600 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2f1352c-4d83-4d92-ab5e-f81457da7f57" containerName="ceilometer-central-agent"
Dec 05 12:47:58 crc kubenswrapper[4784]: E1205 12:47:58.148625 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2f1352c-4d83-4d92-ab5e-f81457da7f57" containerName="ceilometer-notification-agent"
Dec 05 12:47:58 crc kubenswrapper[4784]: I1205 12:47:58.148632 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2f1352c-4d83-4d92-ab5e-f81457da7f57" containerName="ceilometer-notification-agent"
Dec 05 12:47:58 crc kubenswrapper[4784]: E1205 12:47:58.148648 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2f1352c-4d83-4d92-ab5e-f81457da7f57" containerName="sg-core"
Dec 05 12:47:58 crc kubenswrapper[4784]: I1205 12:47:58.148654 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2f1352c-4d83-4d92-ab5e-f81457da7f57" containerName="sg-core"
Dec 05 12:47:58 crc kubenswrapper[4784]: E1205 12:47:58.148668 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2f1352c-4d83-4d92-ab5e-f81457da7f57" containerName="proxy-httpd"
Dec 05 12:47:58 crc kubenswrapper[4784]: I1205 12:47:58.148673 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2f1352c-4d83-4d92-ab5e-f81457da7f57" containerName="proxy-httpd"
Dec 05 12:47:58 crc kubenswrapper[4784]: I1205 12:47:58.148867 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2f1352c-4d83-4d92-ab5e-f81457da7f57" containerName="ceilometer-notification-agent"
Dec 05 12:47:58 crc kubenswrapper[4784]: I1205 12:47:58.148878 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2f1352c-4d83-4d92-ab5e-f81457da7f57" containerName="sg-core"
Dec 05 12:47:58 crc kubenswrapper[4784]: I1205 12:47:58.148889 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2f1352c-4d83-4d92-ab5e-f81457da7f57" containerName="proxy-httpd"
Dec 05 12:47:58 crc kubenswrapper[4784]: I1205 12:47:58.148904 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2f1352c-4d83-4d92-ab5e-f81457da7f57" containerName="ceilometer-central-agent"
Dec 05 12:47:58 crc kubenswrapper[4784]: I1205 12:47:58.150578 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 12:47:58 crc kubenswrapper[4784]: I1205 12:47:58.152342 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 12:47:58 crc kubenswrapper[4784]: I1205 12:47:58.170712 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Dec 05 12:47:58 crc kubenswrapper[4784]: I1205 12:47:58.171044 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Dec 05 12:47:58 crc kubenswrapper[4784]: I1205 12:47:58.215360 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ebb17583-807d-46d7-a044-770a5df47767-config-data\") pod \"ceilometer-0\" (UID: \"ebb17583-807d-46d7-a044-770a5df47767\") " pod="openstack/ceilometer-0"
Dec 05 12:47:58 crc kubenswrapper[4784]: I1205 12:47:58.215650 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ebb17583-807d-46d7-a044-770a5df47767-log-httpd\") pod \"ceilometer-0\" (UID: \"ebb17583-807d-46d7-a044-770a5df47767\") " pod="openstack/ceilometer-0"
Dec 05 12:47:58 crc kubenswrapper[4784]: I1205 12:47:58.215759 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ebb17583-807d-46d7-a044-770a5df47767-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ebb17583-807d-46d7-a044-770a5df47767\") " pod="openstack/ceilometer-0"
Dec 05 12:47:58 crc kubenswrapper[4784]: I1205 12:47:58.215879 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebb17583-807d-46d7-a044-770a5df47767-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ebb17583-807d-46d7-a044-770a5df47767\") " pod="openstack/ceilometer-0"
Dec 05 12:47:58 crc kubenswrapper[4784]: I1205 12:47:58.215956 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7frxn\" (UniqueName: \"kubernetes.io/projected/ebb17583-807d-46d7-a044-770a5df47767-kube-api-access-7frxn\") pod \"ceilometer-0\" (UID: \"ebb17583-807d-46d7-a044-770a5df47767\") " pod="openstack/ceilometer-0"
Dec 05 12:47:58 crc kubenswrapper[4784]: I1205 12:47:58.216006 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ebb17583-807d-46d7-a044-770a5df47767-scripts\") pod \"ceilometer-0\" (UID: \"ebb17583-807d-46d7-a044-770a5df47767\") " pod="openstack/ceilometer-0"
Dec 05 12:47:58 crc kubenswrapper[4784]: I1205 12:47:58.216038 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ebb17583-807d-46d7-a044-770a5df47767-run-httpd\") pod \"ceilometer-0\" (UID: \"ebb17583-807d-46d7-a044-770a5df47767\") " pod="openstack/ceilometer-0"
Dec 05 12:47:58 crc kubenswrapper[4784]: I1205 12:47:58.317869 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebb17583-807d-46d7-a044-770a5df47767-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ebb17583-807d-46d7-a044-770a5df47767\") " pod="openstack/ceilometer-0"
Dec 05 12:47:58 crc kubenswrapper[4784]: I1205 12:47:58.317932 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7frxn\" (UniqueName: \"kubernetes.io/projected/ebb17583-807d-46d7-a044-770a5df47767-kube-api-access-7frxn\") pod \"ceilometer-0\" (UID: \"ebb17583-807d-46d7-a044-770a5df47767\") " pod="openstack/ceilometer-0"
Dec 05 12:47:58 crc kubenswrapper[4784]: I1205 12:47:58.317965 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ebb17583-807d-46d7-a044-770a5df47767-scripts\") pod \"ceilometer-0\" (UID: \"ebb17583-807d-46d7-a044-770a5df47767\") " pod="openstack/ceilometer-0"
Dec 05 12:47:58 crc kubenswrapper[4784]: I1205 12:47:58.317981 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ebb17583-807d-46d7-a044-770a5df47767-run-httpd\") pod \"ceilometer-0\" (UID: \"ebb17583-807d-46d7-a044-770a5df47767\") " pod="openstack/ceilometer-0"
Dec 05 12:47:58 crc kubenswrapper[4784]: I1205 12:47:58.318031 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ebb17583-807d-46d7-a044-770a5df47767-config-data\") pod \"ceilometer-0\" (UID: \"ebb17583-807d-46d7-a044-770a5df47767\") " pod="openstack/ceilometer-0"
Dec 05 12:47:58 crc kubenswrapper[4784]: I1205 12:47:58.318103 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ebb17583-807d-46d7-a044-770a5df47767-log-httpd\") pod \"ceilometer-0\" (UID: \"ebb17583-807d-46d7-a044-770a5df47767\") " pod="openstack/ceilometer-0"
Dec 05 12:47:58 crc kubenswrapper[4784]: I1205 12:47:58.318130 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ebb17583-807d-46d7-a044-770a5df47767-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ebb17583-807d-46d7-a044-770a5df47767\") " pod="openstack/ceilometer-0"
Dec 05 12:47:58 crc kubenswrapper[4784]: I1205 12:47:58.318506 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ebb17583-807d-46d7-a044-770a5df47767-run-httpd\") pod \"ceilometer-0\" (UID: \"ebb17583-807d-46d7-a044-770a5df47767\") " pod="openstack/ceilometer-0"
Dec 05 12:47:58 crc kubenswrapper[4784]: I1205 12:47:58.318747 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ebb17583-807d-46d7-a044-770a5df47767-log-httpd\") pod \"ceilometer-0\" (UID: \"ebb17583-807d-46d7-a044-770a5df47767\") " pod="openstack/ceilometer-0"
Dec 05 12:47:58 crc kubenswrapper[4784]: I1205 12:47:58.322433 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebb17583-807d-46d7-a044-770a5df47767-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ebb17583-807d-46d7-a044-770a5df47767\") " pod="openstack/ceilometer-0"
Dec 05 12:47:58 crc kubenswrapper[4784]: I1205 12:47:58.323760 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ebb17583-807d-46d7-a044-770a5df47767-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ebb17583-807d-46d7-a044-770a5df47767\") " pod="openstack/ceilometer-0"
Dec 05 12:47:58 crc kubenswrapper[4784]: I1205 12:47:58.325681 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ebb17583-807d-46d7-a044-770a5df47767-scripts\") pod \"ceilometer-0\" (UID: \"ebb17583-807d-46d7-a044-770a5df47767\") " pod="openstack/ceilometer-0"
Dec 05 12:47:58 crc kubenswrapper[4784]: I1205 12:47:58.326360 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ebb17583-807d-46d7-a044-770a5df47767-config-data\") pod \"ceilometer-0\" (UID: \"ebb17583-807d-46d7-a044-770a5df47767\") " pod="openstack/ceilometer-0"
Dec 05 12:47:58 crc kubenswrapper[4784]: I1205 12:47:58.339102 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7frxn\" (UniqueName: \"kubernetes.io/projected/ebb17583-807d-46d7-a044-770a5df47767-kube-api-access-7frxn\") pod \"ceilometer-0\" (UID: \"ebb17583-807d-46d7-a044-770a5df47767\") " pod="openstack/ceilometer-0"
Dec 05 12:47:58 crc kubenswrapper[4784]: I1205 12:47:58.476327 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:47:58 crc kubenswrapper[4784]: I1205 12:47:58.966052 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:47:58 crc kubenswrapper[4784]: W1205 12:47:58.970536 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podebb17583_807d_46d7_a044_770a5df47767.slice/crio-7b460153b8a5434d68413073a3cdba0a3aa5da73ba4d279fdbc0a8479672bb4a WatchSource:0}: Error finding container 7b460153b8a5434d68413073a3cdba0a3aa5da73ba4d279fdbc0a8479672bb4a: Status 404 returned error can't find the container with id 7b460153b8a5434d68413073a3cdba0a3aa5da73ba4d279fdbc0a8479672bb4a Dec 05 12:47:59 crc kubenswrapper[4784]: I1205 12:47:59.010244 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a2f1352c-4d83-4d92-ab5e-f81457da7f57" path="/var/lib/kubelet/pods/a2f1352c-4d83-4d92-ab5e-f81457da7f57/volumes" Dec 05 12:47:59 crc kubenswrapper[4784]: I1205 12:47:59.036131 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ebb17583-807d-46d7-a044-770a5df47767","Type":"ContainerStarted","Data":"7b460153b8a5434d68413073a3cdba0a3aa5da73ba4d279fdbc0a8479672bb4a"} Dec 05 12:47:59 crc kubenswrapper[4784]: I1205 12:47:59.573007 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:47:59 crc kubenswrapper[4784]: I1205 12:47:59.573471 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:47:59 crc kubenswrapper[4784]: I1205 12:47:59.573506 4784 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" Dec 05 12:47:59 crc kubenswrapper[4784]: I1205 12:47:59.574176 4784 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"babf0042920beaff6a1a6221d95064bf622413e3938841d914cae1798c6b7709"} pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 12:47:59 crc kubenswrapper[4784]: I1205 12:47:59.574243 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" containerID="cri-o://babf0042920beaff6a1a6221d95064bf622413e3938841d914cae1798c6b7709" gracePeriod=600 Dec 05 12:47:59 crc kubenswrapper[4784]: I1205 12:47:59.657070 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 12:47:59 crc kubenswrapper[4784]: I1205 12:47:59.741684 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eceb1ef2-197a-47dd-9d57-94990b64208e-combined-ca-bundle\") pod \"eceb1ef2-197a-47dd-9d57-94990b64208e\" (UID: \"eceb1ef2-197a-47dd-9d57-94990b64208e\") " Dec 05 12:47:59 crc kubenswrapper[4784]: I1205 12:47:59.741855 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eceb1ef2-197a-47dd-9d57-94990b64208e-config-data\") pod \"eceb1ef2-197a-47dd-9d57-94990b64208e\" (UID: \"eceb1ef2-197a-47dd-9d57-94990b64208e\") " Dec 05 12:47:59 crc kubenswrapper[4784]: I1205 12:47:59.741890 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-67wjr\" (UniqueName: \"kubernetes.io/projected/eceb1ef2-197a-47dd-9d57-94990b64208e-kube-api-access-67wjr\") pod \"eceb1ef2-197a-47dd-9d57-94990b64208e\" (UID: \"eceb1ef2-197a-47dd-9d57-94990b64208e\") " Dec 05 12:47:59 crc kubenswrapper[4784]: I1205 12:47:59.748116 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eceb1ef2-197a-47dd-9d57-94990b64208e-kube-api-access-67wjr" (OuterVolumeSpecName: "kube-api-access-67wjr") pod "eceb1ef2-197a-47dd-9d57-94990b64208e" (UID: "eceb1ef2-197a-47dd-9d57-94990b64208e"). InnerVolumeSpecName "kube-api-access-67wjr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:47:59 crc kubenswrapper[4784]: I1205 12:47:59.767674 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eceb1ef2-197a-47dd-9d57-94990b64208e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "eceb1ef2-197a-47dd-9d57-94990b64208e" (UID: "eceb1ef2-197a-47dd-9d57-94990b64208e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:47:59 crc kubenswrapper[4784]: I1205 12:47:59.790781 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eceb1ef2-197a-47dd-9d57-94990b64208e-config-data" (OuterVolumeSpecName: "config-data") pod "eceb1ef2-197a-47dd-9d57-94990b64208e" (UID: "eceb1ef2-197a-47dd-9d57-94990b64208e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:47:59 crc kubenswrapper[4784]: I1205 12:47:59.814920 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:47:59 crc kubenswrapper[4784]: I1205 12:47:59.845647 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eceb1ef2-197a-47dd-9d57-94990b64208e-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:59 crc kubenswrapper[4784]: I1205 12:47:59.845686 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-67wjr\" (UniqueName: \"kubernetes.io/projected/eceb1ef2-197a-47dd-9d57-94990b64208e-kube-api-access-67wjr\") on node \"crc\" DevicePath \"\"" Dec 05 12:47:59 crc kubenswrapper[4784]: I1205 12:47:59.845701 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eceb1ef2-197a-47dd-9d57-94990b64208e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:48:00 crc kubenswrapper[4784]: I1205 12:48:00.051355 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ebb17583-807d-46d7-a044-770a5df47767","Type":"ContainerStarted","Data":"02db3d322fbd098914b1cccdb042ddf34a90653fb6fcc25242c4df49b82e93bb"} Dec 05 12:48:00 crc kubenswrapper[4784]: I1205 12:48:00.051403 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ebb17583-807d-46d7-a044-770a5df47767","Type":"ContainerStarted","Data":"f08cd08f84cfaa38311c120526b32a39d012228084df3f6d36389eb1ec800f46"} Dec 05 12:48:00 crc kubenswrapper[4784]: I1205 12:48:00.054477 4784 generic.go:334] "Generic (PLEG): container finished" podID="be412f31-7a36-4811-8914-be8cdc987d08" containerID="babf0042920beaff6a1a6221d95064bf622413e3938841d914cae1798c6b7709" exitCode=0 Dec 05 12:48:00 crc kubenswrapper[4784]: I1205 12:48:00.054537 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerDied","Data":"babf0042920beaff6a1a6221d95064bf622413e3938841d914cae1798c6b7709"} Dec 05 12:48:00 crc kubenswrapper[4784]: I1205 12:48:00.054559 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerStarted","Data":"1e04a48f7470e663550729d5a6189f2705f6151f92d045a29c6589f7e1ee84a9"} Dec 05 12:48:00 crc kubenswrapper[4784]: I1205 12:48:00.054575 4784 scope.go:117] "RemoveContainer" containerID="a15484205cc287f77cc28ed8494e1ca51b919f7c735fb5329f3dd3fb14f9fd3b" Dec 05 12:48:00 crc kubenswrapper[4784]: I1205 12:48:00.058222 4784 generic.go:334] "Generic (PLEG): container finished" podID="eceb1ef2-197a-47dd-9d57-94990b64208e" containerID="45c3799d11cb75d1cd3c3ae691070108c05eb1162a1748ac38e0fc7c3a1d4bfd" exitCode=0 Dec 05 12:48:00 crc kubenswrapper[4784]: I1205 12:48:00.058246 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"eceb1ef2-197a-47dd-9d57-94990b64208e","Type":"ContainerDied","Data":"45c3799d11cb75d1cd3c3ae691070108c05eb1162a1748ac38e0fc7c3a1d4bfd"} Dec 05 12:48:00 crc kubenswrapper[4784]: I1205 12:48:00.058261 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" 
event={"ID":"eceb1ef2-197a-47dd-9d57-94990b64208e","Type":"ContainerDied","Data":"96ea81a0dd72f3ab679b0ccba143d9a81802c27dec75785152a582a12695645d"} Dec 05 12:48:00 crc kubenswrapper[4784]: I1205 12:48:00.058301 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 12:48:00 crc kubenswrapper[4784]: I1205 12:48:00.084366 4784 scope.go:117] "RemoveContainer" containerID="45c3799d11cb75d1cd3c3ae691070108c05eb1162a1748ac38e0fc7c3a1d4bfd" Dec 05 12:48:00 crc kubenswrapper[4784]: I1205 12:48:00.107003 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 12:48:00 crc kubenswrapper[4784]: I1205 12:48:00.122484 4784 scope.go:117] "RemoveContainer" containerID="45c3799d11cb75d1cd3c3ae691070108c05eb1162a1748ac38e0fc7c3a1d4bfd" Dec 05 12:48:00 crc kubenswrapper[4784]: E1205 12:48:00.122927 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"45c3799d11cb75d1cd3c3ae691070108c05eb1162a1748ac38e0fc7c3a1d4bfd\": container with ID starting with 45c3799d11cb75d1cd3c3ae691070108c05eb1162a1748ac38e0fc7c3a1d4bfd not found: ID does not exist" containerID="45c3799d11cb75d1cd3c3ae691070108c05eb1162a1748ac38e0fc7c3a1d4bfd" Dec 05 12:48:00 crc kubenswrapper[4784]: I1205 12:48:00.122960 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"45c3799d11cb75d1cd3c3ae691070108c05eb1162a1748ac38e0fc7c3a1d4bfd"} err="failed to get container status \"45c3799d11cb75d1cd3c3ae691070108c05eb1162a1748ac38e0fc7c3a1d4bfd\": rpc error: code = NotFound desc = could not find container \"45c3799d11cb75d1cd3c3ae691070108c05eb1162a1748ac38e0fc7c3a1d4bfd\": container with ID starting with 45c3799d11cb75d1cd3c3ae691070108c05eb1162a1748ac38e0fc7c3a1d4bfd not found: ID does not exist" Dec 05 12:48:00 crc kubenswrapper[4784]: I1205 12:48:00.124025 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 12:48:00 crc kubenswrapper[4784]: I1205 12:48:00.139812 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 12:48:00 crc kubenswrapper[4784]: E1205 12:48:00.140269 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eceb1ef2-197a-47dd-9d57-94990b64208e" containerName="nova-cell0-conductor-conductor" Dec 05 12:48:00 crc kubenswrapper[4784]: I1205 12:48:00.140288 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="eceb1ef2-197a-47dd-9d57-94990b64208e" containerName="nova-cell0-conductor-conductor" Dec 05 12:48:00 crc kubenswrapper[4784]: I1205 12:48:00.140482 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="eceb1ef2-197a-47dd-9d57-94990b64208e" containerName="nova-cell0-conductor-conductor" Dec 05 12:48:00 crc kubenswrapper[4784]: I1205 12:48:00.141108 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 12:48:00 crc kubenswrapper[4784]: I1205 12:48:00.145934 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 05 12:48:00 crc kubenswrapper[4784]: I1205 12:48:00.146135 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-nx8fv" Dec 05 12:48:00 crc kubenswrapper[4784]: I1205 12:48:00.157050 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 12:48:00 crc kubenswrapper[4784]: I1205 12:48:00.275612 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/306ca66e-9ffa-49fd-b2ad-1021c24fa070-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"306ca66e-9ffa-49fd-b2ad-1021c24fa070\") " pod="openstack/nova-cell0-conductor-0" Dec 05 12:48:00 crc kubenswrapper[4784]: I1205 12:48:00.276014 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/306ca66e-9ffa-49fd-b2ad-1021c24fa070-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"306ca66e-9ffa-49fd-b2ad-1021c24fa070\") " pod="openstack/nova-cell0-conductor-0" Dec 05 12:48:00 crc kubenswrapper[4784]: I1205 12:48:00.276097 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wm69f\" (UniqueName: \"kubernetes.io/projected/306ca66e-9ffa-49fd-b2ad-1021c24fa070-kube-api-access-wm69f\") pod \"nova-cell0-conductor-0\" (UID: \"306ca66e-9ffa-49fd-b2ad-1021c24fa070\") " pod="openstack/nova-cell0-conductor-0" Dec 05 12:48:00 crc kubenswrapper[4784]: I1205 12:48:00.377980 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/306ca66e-9ffa-49fd-b2ad-1021c24fa070-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"306ca66e-9ffa-49fd-b2ad-1021c24fa070\") " pod="openstack/nova-cell0-conductor-0" Dec 05 12:48:00 crc kubenswrapper[4784]: I1205 12:48:00.378124 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wm69f\" (UniqueName: \"kubernetes.io/projected/306ca66e-9ffa-49fd-b2ad-1021c24fa070-kube-api-access-wm69f\") pod \"nova-cell0-conductor-0\" (UID: \"306ca66e-9ffa-49fd-b2ad-1021c24fa070\") " pod="openstack/nova-cell0-conductor-0" Dec 05 12:48:00 crc kubenswrapper[4784]: I1205 12:48:00.378223 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/306ca66e-9ffa-49fd-b2ad-1021c24fa070-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"306ca66e-9ffa-49fd-b2ad-1021c24fa070\") " pod="openstack/nova-cell0-conductor-0" Dec 05 12:48:00 crc kubenswrapper[4784]: I1205 12:48:00.384260 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/306ca66e-9ffa-49fd-b2ad-1021c24fa070-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"306ca66e-9ffa-49fd-b2ad-1021c24fa070\") " pod="openstack/nova-cell0-conductor-0" Dec 05 12:48:00 crc kubenswrapper[4784]: I1205 12:48:00.387245 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/306ca66e-9ffa-49fd-b2ad-1021c24fa070-config-data\") pod \"nova-cell0-conductor-0\" 
(UID: \"306ca66e-9ffa-49fd-b2ad-1021c24fa070\") " pod="openstack/nova-cell0-conductor-0" Dec 05 12:48:00 crc kubenswrapper[4784]: I1205 12:48:00.407586 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wm69f\" (UniqueName: \"kubernetes.io/projected/306ca66e-9ffa-49fd-b2ad-1021c24fa070-kube-api-access-wm69f\") pod \"nova-cell0-conductor-0\" (UID: \"306ca66e-9ffa-49fd-b2ad-1021c24fa070\") " pod="openstack/nova-cell0-conductor-0" Dec 05 12:48:00 crc kubenswrapper[4784]: I1205 12:48:00.479437 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 12:48:00 crc kubenswrapper[4784]: I1205 12:48:00.950329 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 12:48:01 crc kubenswrapper[4784]: I1205 12:48:01.017879 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eceb1ef2-197a-47dd-9d57-94990b64208e" path="/var/lib/kubelet/pods/eceb1ef2-197a-47dd-9d57-94990b64208e/volumes" Dec 05 12:48:01 crc kubenswrapper[4784]: I1205 12:48:01.071381 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ebb17583-807d-46d7-a044-770a5df47767","Type":"ContainerStarted","Data":"69ffb335fd935f734240e738cf53383a98bc03a86bdf32f2c155c97e7eb42895"} Dec 05 12:48:01 crc kubenswrapper[4784]: I1205 12:48:01.077146 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"306ca66e-9ffa-49fd-b2ad-1021c24fa070","Type":"ContainerStarted","Data":"1b393f1fc4c4dd8c4bafb4220db9788f6ff5ef81d73f22ba2bf8bcc235d2d84f"} Dec 05 12:48:02 crc kubenswrapper[4784]: I1205 12:48:02.101179 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ebb17583-807d-46d7-a044-770a5df47767","Type":"ContainerStarted","Data":"305bfc8117568f6360542668643027fb4ba3e66c87cfc9049987d3447fd32bf5"} Dec 05 12:48:02 crc kubenswrapper[4784]: I1205 12:48:02.101688 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 12:48:02 crc kubenswrapper[4784]: I1205 12:48:02.101372 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ebb17583-807d-46d7-a044-770a5df47767" containerName="ceilometer-central-agent" containerID="cri-o://f08cd08f84cfaa38311c120526b32a39d012228084df3f6d36389eb1ec800f46" gracePeriod=30 Dec 05 12:48:02 crc kubenswrapper[4784]: I1205 12:48:02.101479 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ebb17583-807d-46d7-a044-770a5df47767" containerName="proxy-httpd" containerID="cri-o://305bfc8117568f6360542668643027fb4ba3e66c87cfc9049987d3447fd32bf5" gracePeriod=30 Dec 05 12:48:02 crc kubenswrapper[4784]: I1205 12:48:02.101468 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ebb17583-807d-46d7-a044-770a5df47767" containerName="ceilometer-notification-agent" containerID="cri-o://02db3d322fbd098914b1cccdb042ddf34a90653fb6fcc25242c4df49b82e93bb" gracePeriod=30 Dec 05 12:48:02 crc kubenswrapper[4784]: I1205 12:48:02.101446 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ebb17583-807d-46d7-a044-770a5df47767" containerName="sg-core" containerID="cri-o://69ffb335fd935f734240e738cf53383a98bc03a86bdf32f2c155c97e7eb42895" gracePeriod=30 Dec 05 12:48:02 crc 
kubenswrapper[4784]: I1205 12:48:02.103235 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"306ca66e-9ffa-49fd-b2ad-1021c24fa070","Type":"ContainerStarted","Data":"848473e877915bd9f17287725d5cad0e1e038a6f2b91abecf99ba36553cd3370"} Dec 05 12:48:02 crc kubenswrapper[4784]: I1205 12:48:02.104045 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Dec 05 12:48:02 crc kubenswrapper[4784]: I1205 12:48:02.138463 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.464724994 podStartE2EDuration="4.138415622s" podCreationTimestamp="2025-12-05 12:47:58 +0000 UTC" firstStartedPulling="2025-12-05 12:47:58.973217505 +0000 UTC m=+1358.393284320" lastFinishedPulling="2025-12-05 12:48:01.646908133 +0000 UTC m=+1361.066974948" observedRunningTime="2025-12-05 12:48:02.127881494 +0000 UTC m=+1361.547948309" watchObservedRunningTime="2025-12-05 12:48:02.138415622 +0000 UTC m=+1361.558482437" Dec 05 12:48:02 crc kubenswrapper[4784]: I1205 12:48:02.149810 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.149789117 podStartE2EDuration="2.149789117s" podCreationTimestamp="2025-12-05 12:48:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:48:02.146814174 +0000 UTC m=+1361.566881000" watchObservedRunningTime="2025-12-05 12:48:02.149789117 +0000 UTC m=+1361.569855932" Dec 05 12:48:03 crc kubenswrapper[4784]: I1205 12:48:03.112883 4784 generic.go:334] "Generic (PLEG): container finished" podID="ebb17583-807d-46d7-a044-770a5df47767" containerID="305bfc8117568f6360542668643027fb4ba3e66c87cfc9049987d3447fd32bf5" exitCode=0 Dec 05 12:48:03 crc kubenswrapper[4784]: I1205 12:48:03.113178 4784 generic.go:334] "Generic (PLEG): container finished" podID="ebb17583-807d-46d7-a044-770a5df47767" containerID="69ffb335fd935f734240e738cf53383a98bc03a86bdf32f2c155c97e7eb42895" exitCode=2 Dec 05 12:48:03 crc kubenswrapper[4784]: I1205 12:48:03.113198 4784 generic.go:334] "Generic (PLEG): container finished" podID="ebb17583-807d-46d7-a044-770a5df47767" containerID="02db3d322fbd098914b1cccdb042ddf34a90653fb6fcc25242c4df49b82e93bb" exitCode=0 Dec 05 12:48:03 crc kubenswrapper[4784]: I1205 12:48:03.112968 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ebb17583-807d-46d7-a044-770a5df47767","Type":"ContainerDied","Data":"305bfc8117568f6360542668643027fb4ba3e66c87cfc9049987d3447fd32bf5"} Dec 05 12:48:03 crc kubenswrapper[4784]: I1205 12:48:03.113316 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ebb17583-807d-46d7-a044-770a5df47767","Type":"ContainerDied","Data":"69ffb335fd935f734240e738cf53383a98bc03a86bdf32f2c155c97e7eb42895"} Dec 05 12:48:03 crc kubenswrapper[4784]: I1205 12:48:03.113336 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ebb17583-807d-46d7-a044-770a5df47767","Type":"ContainerDied","Data":"02db3d322fbd098914b1cccdb042ddf34a90653fb6fcc25242c4df49b82e93bb"} Dec 05 12:48:10 crc kubenswrapper[4784]: I1205 12:48:10.209258 4784 generic.go:334] "Generic (PLEG): container finished" podID="ebb17583-807d-46d7-a044-770a5df47767" containerID="f08cd08f84cfaa38311c120526b32a39d012228084df3f6d36389eb1ec800f46" exitCode=0 Dec 05 
12:48:10 crc kubenswrapper[4784]: I1205 12:48:10.209804 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ebb17583-807d-46d7-a044-770a5df47767","Type":"ContainerDied","Data":"f08cd08f84cfaa38311c120526b32a39d012228084df3f6d36389eb1ec800f46"} Dec 05 12:48:10 crc kubenswrapper[4784]: I1205 12:48:10.209830 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ebb17583-807d-46d7-a044-770a5df47767","Type":"ContainerDied","Data":"7b460153b8a5434d68413073a3cdba0a3aa5da73ba4d279fdbc0a8479672bb4a"} Dec 05 12:48:10 crc kubenswrapper[4784]: I1205 12:48:10.209840 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7b460153b8a5434d68413073a3cdba0a3aa5da73ba4d279fdbc0a8479672bb4a" Dec 05 12:48:10 crc kubenswrapper[4784]: I1205 12:48:10.253289 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:48:10 crc kubenswrapper[4784]: I1205 12:48:10.365503 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebb17583-807d-46d7-a044-770a5df47767-combined-ca-bundle\") pod \"ebb17583-807d-46d7-a044-770a5df47767\" (UID: \"ebb17583-807d-46d7-a044-770a5df47767\") " Dec 05 12:48:10 crc kubenswrapper[4784]: I1205 12:48:10.365567 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ebb17583-807d-46d7-a044-770a5df47767-run-httpd\") pod \"ebb17583-807d-46d7-a044-770a5df47767\" (UID: \"ebb17583-807d-46d7-a044-770a5df47767\") " Dec 05 12:48:10 crc kubenswrapper[4784]: I1205 12:48:10.365594 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ebb17583-807d-46d7-a044-770a5df47767-sg-core-conf-yaml\") pod \"ebb17583-807d-46d7-a044-770a5df47767\" (UID: \"ebb17583-807d-46d7-a044-770a5df47767\") " Dec 05 12:48:10 crc kubenswrapper[4784]: I1205 12:48:10.365646 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ebb17583-807d-46d7-a044-770a5df47767-scripts\") pod \"ebb17583-807d-46d7-a044-770a5df47767\" (UID: \"ebb17583-807d-46d7-a044-770a5df47767\") " Dec 05 12:48:10 crc kubenswrapper[4784]: I1205 12:48:10.365665 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ebb17583-807d-46d7-a044-770a5df47767-config-data\") pod \"ebb17583-807d-46d7-a044-770a5df47767\" (UID: \"ebb17583-807d-46d7-a044-770a5df47767\") " Dec 05 12:48:10 crc kubenswrapper[4784]: I1205 12:48:10.365720 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7frxn\" (UniqueName: \"kubernetes.io/projected/ebb17583-807d-46d7-a044-770a5df47767-kube-api-access-7frxn\") pod \"ebb17583-807d-46d7-a044-770a5df47767\" (UID: \"ebb17583-807d-46d7-a044-770a5df47767\") " Dec 05 12:48:10 crc kubenswrapper[4784]: I1205 12:48:10.365789 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ebb17583-807d-46d7-a044-770a5df47767-log-httpd\") pod \"ebb17583-807d-46d7-a044-770a5df47767\" (UID: \"ebb17583-807d-46d7-a044-770a5df47767\") " Dec 05 12:48:10 crc kubenswrapper[4784]: I1205 12:48:10.366281 4784 operation_generator.go:803] UnmountVolume.TearDown 
succeeded for volume "kubernetes.io/empty-dir/ebb17583-807d-46d7-a044-770a5df47767-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "ebb17583-807d-46d7-a044-770a5df47767" (UID: "ebb17583-807d-46d7-a044-770a5df47767"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:48:10 crc kubenswrapper[4784]: I1205 12:48:10.366433 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ebb17583-807d-46d7-a044-770a5df47767-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "ebb17583-807d-46d7-a044-770a5df47767" (UID: "ebb17583-807d-46d7-a044-770a5df47767"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:48:10 crc kubenswrapper[4784]: I1205 12:48:10.371576 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ebb17583-807d-46d7-a044-770a5df47767-scripts" (OuterVolumeSpecName: "scripts") pod "ebb17583-807d-46d7-a044-770a5df47767" (UID: "ebb17583-807d-46d7-a044-770a5df47767"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:48:10 crc kubenswrapper[4784]: I1205 12:48:10.372861 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ebb17583-807d-46d7-a044-770a5df47767-kube-api-access-7frxn" (OuterVolumeSpecName: "kube-api-access-7frxn") pod "ebb17583-807d-46d7-a044-770a5df47767" (UID: "ebb17583-807d-46d7-a044-770a5df47767"). InnerVolumeSpecName "kube-api-access-7frxn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:48:10 crc kubenswrapper[4784]: I1205 12:48:10.396332 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ebb17583-807d-46d7-a044-770a5df47767-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "ebb17583-807d-46d7-a044-770a5df47767" (UID: "ebb17583-807d-46d7-a044-770a5df47767"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:48:10 crc kubenswrapper[4784]: I1205 12:48:10.459133 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ebb17583-807d-46d7-a044-770a5df47767-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ebb17583-807d-46d7-a044-770a5df47767" (UID: "ebb17583-807d-46d7-a044-770a5df47767"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:48:10 crc kubenswrapper[4784]: I1205 12:48:10.467435 4784 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ebb17583-807d-46d7-a044-770a5df47767-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 12:48:10 crc kubenswrapper[4784]: I1205 12:48:10.467467 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebb17583-807d-46d7-a044-770a5df47767-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:48:10 crc kubenswrapper[4784]: I1205 12:48:10.467481 4784 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ebb17583-807d-46d7-a044-770a5df47767-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 12:48:10 crc kubenswrapper[4784]: I1205 12:48:10.467490 4784 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ebb17583-807d-46d7-a044-770a5df47767-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 12:48:10 crc kubenswrapper[4784]: I1205 12:48:10.467498 4784 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ebb17583-807d-46d7-a044-770a5df47767-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:48:10 crc kubenswrapper[4784]: I1205 12:48:10.467507 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7frxn\" (UniqueName: \"kubernetes.io/projected/ebb17583-807d-46d7-a044-770a5df47767-kube-api-access-7frxn\") on node \"crc\" DevicePath \"\"" Dec 05 12:48:10 crc kubenswrapper[4784]: I1205 12:48:10.478576 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ebb17583-807d-46d7-a044-770a5df47767-config-data" (OuterVolumeSpecName: "config-data") pod "ebb17583-807d-46d7-a044-770a5df47767" (UID: "ebb17583-807d-46d7-a044-770a5df47767"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:48:10 crc kubenswrapper[4784]: I1205 12:48:10.510517 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Dec 05 12:48:10 crc kubenswrapper[4784]: I1205 12:48:10.569312 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ebb17583-807d-46d7-a044-770a5df47767-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.219139 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.225484 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-9qvf2"] Dec 05 12:48:11 crc kubenswrapper[4784]: E1205 12:48:11.225847 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebb17583-807d-46d7-a044-770a5df47767" containerName="ceilometer-central-agent" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.225862 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebb17583-807d-46d7-a044-770a5df47767" containerName="ceilometer-central-agent" Dec 05 12:48:11 crc kubenswrapper[4784]: E1205 12:48:11.225881 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebb17583-807d-46d7-a044-770a5df47767" containerName="proxy-httpd" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.225902 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebb17583-807d-46d7-a044-770a5df47767" containerName="proxy-httpd" Dec 05 12:48:11 crc kubenswrapper[4784]: E1205 12:48:11.225925 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebb17583-807d-46d7-a044-770a5df47767" containerName="sg-core" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.225932 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebb17583-807d-46d7-a044-770a5df47767" containerName="sg-core" Dec 05 12:48:11 crc kubenswrapper[4784]: E1205 12:48:11.225943 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebb17583-807d-46d7-a044-770a5df47767" containerName="ceilometer-notification-agent" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.225949 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebb17583-807d-46d7-a044-770a5df47767" containerName="ceilometer-notification-agent" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.226140 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebb17583-807d-46d7-a044-770a5df47767" containerName="ceilometer-central-agent" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.226161 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebb17583-807d-46d7-a044-770a5df47767" containerName="proxy-httpd" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.226171 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebb17583-807d-46d7-a044-770a5df47767" containerName="ceilometer-notification-agent" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.226196 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebb17583-807d-46d7-a044-770a5df47767" containerName="sg-core" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.226881 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-9qvf2" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.232348 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.238713 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.242207 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-9qvf2"] Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.252808 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.270375 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.283677 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6vrdb\" (UniqueName: \"kubernetes.io/projected/31e25db3-7d9e-43b2-8e5a-b6956be5114e-kube-api-access-6vrdb\") pod \"nova-cell0-cell-mapping-9qvf2\" (UID: \"31e25db3-7d9e-43b2-8e5a-b6956be5114e\") " pod="openstack/nova-cell0-cell-mapping-9qvf2" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.283789 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31e25db3-7d9e-43b2-8e5a-b6956be5114e-config-data\") pod \"nova-cell0-cell-mapping-9qvf2\" (UID: \"31e25db3-7d9e-43b2-8e5a-b6956be5114e\") " pod="openstack/nova-cell0-cell-mapping-9qvf2" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.283866 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/31e25db3-7d9e-43b2-8e5a-b6956be5114e-scripts\") pod \"nova-cell0-cell-mapping-9qvf2\" (UID: \"31e25db3-7d9e-43b2-8e5a-b6956be5114e\") " pod="openstack/nova-cell0-cell-mapping-9qvf2" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.283922 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31e25db3-7d9e-43b2-8e5a-b6956be5114e-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-9qvf2\" (UID: \"31e25db3-7d9e-43b2-8e5a-b6956be5114e\") " pod="openstack/nova-cell0-cell-mapping-9qvf2" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.293769 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.296690 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.300247 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.301267 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.333527 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.387306 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6vrdb\" (UniqueName: \"kubernetes.io/projected/31e25db3-7d9e-43b2-8e5a-b6956be5114e-kube-api-access-6vrdb\") pod \"nova-cell0-cell-mapping-9qvf2\" (UID: \"31e25db3-7d9e-43b2-8e5a-b6956be5114e\") " pod="openstack/nova-cell0-cell-mapping-9qvf2" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.387611 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31e25db3-7d9e-43b2-8e5a-b6956be5114e-config-data\") pod \"nova-cell0-cell-mapping-9qvf2\" (UID: \"31e25db3-7d9e-43b2-8e5a-b6956be5114e\") " pod="openstack/nova-cell0-cell-mapping-9qvf2" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.387699 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/31e25db3-7d9e-43b2-8e5a-b6956be5114e-scripts\") pod \"nova-cell0-cell-mapping-9qvf2\" (UID: \"31e25db3-7d9e-43b2-8e5a-b6956be5114e\") " pod="openstack/nova-cell0-cell-mapping-9qvf2" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.387778 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bff391e4-6d41-4a89-aeb6-fe47077b3e77-scripts\") pod \"ceilometer-0\" (UID: \"bff391e4-6d41-4a89-aeb6-fe47077b3e77\") " pod="openstack/ceilometer-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.387852 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bff391e4-6d41-4a89-aeb6-fe47077b3e77-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"bff391e4-6d41-4a89-aeb6-fe47077b3e77\") " pod="openstack/ceilometer-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.387943 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bff391e4-6d41-4a89-aeb6-fe47077b3e77-config-data\") pod \"ceilometer-0\" (UID: \"bff391e4-6d41-4a89-aeb6-fe47077b3e77\") " pod="openstack/ceilometer-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.388010 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4tmpl\" (UniqueName: \"kubernetes.io/projected/bff391e4-6d41-4a89-aeb6-fe47077b3e77-kube-api-access-4tmpl\") pod \"ceilometer-0\" (UID: \"bff391e4-6d41-4a89-aeb6-fe47077b3e77\") " pod="openstack/ceilometer-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.388079 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bff391e4-6d41-4a89-aeb6-fe47077b3e77-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"bff391e4-6d41-4a89-aeb6-fe47077b3e77\") " 
pod="openstack/ceilometer-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.388154 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bff391e4-6d41-4a89-aeb6-fe47077b3e77-log-httpd\") pod \"ceilometer-0\" (UID: \"bff391e4-6d41-4a89-aeb6-fe47077b3e77\") " pod="openstack/ceilometer-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.388295 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31e25db3-7d9e-43b2-8e5a-b6956be5114e-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-9qvf2\" (UID: \"31e25db3-7d9e-43b2-8e5a-b6956be5114e\") " pod="openstack/nova-cell0-cell-mapping-9qvf2" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.388450 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bff391e4-6d41-4a89-aeb6-fe47077b3e77-run-httpd\") pod \"ceilometer-0\" (UID: \"bff391e4-6d41-4a89-aeb6-fe47077b3e77\") " pod="openstack/ceilometer-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.397981 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/31e25db3-7d9e-43b2-8e5a-b6956be5114e-scripts\") pod \"nova-cell0-cell-mapping-9qvf2\" (UID: \"31e25db3-7d9e-43b2-8e5a-b6956be5114e\") " pod="openstack/nova-cell0-cell-mapping-9qvf2" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.398676 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31e25db3-7d9e-43b2-8e5a-b6956be5114e-config-data\") pod \"nova-cell0-cell-mapping-9qvf2\" (UID: \"31e25db3-7d9e-43b2-8e5a-b6956be5114e\") " pod="openstack/nova-cell0-cell-mapping-9qvf2" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.400167 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31e25db3-7d9e-43b2-8e5a-b6956be5114e-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-9qvf2\" (UID: \"31e25db3-7d9e-43b2-8e5a-b6956be5114e\") " pod="openstack/nova-cell0-cell-mapping-9qvf2" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.418806 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.418982 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6vrdb\" (UniqueName: \"kubernetes.io/projected/31e25db3-7d9e-43b2-8e5a-b6956be5114e-kube-api-access-6vrdb\") pod \"nova-cell0-cell-mapping-9qvf2\" (UID: \"31e25db3-7d9e-43b2-8e5a-b6956be5114e\") " pod="openstack/nova-cell0-cell-mapping-9qvf2" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.420517 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.426427 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.452647 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.496203 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ab7198f-769d-49fc-9b8d-cf5825a15018-config-data\") pod \"nova-scheduler-0\" (UID: \"1ab7198f-769d-49fc-9b8d-cf5825a15018\") " pod="openstack/nova-scheduler-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.496252 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bff391e4-6d41-4a89-aeb6-fe47077b3e77-scripts\") pod \"ceilometer-0\" (UID: \"bff391e4-6d41-4a89-aeb6-fe47077b3e77\") " pod="openstack/ceilometer-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.496282 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bff391e4-6d41-4a89-aeb6-fe47077b3e77-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"bff391e4-6d41-4a89-aeb6-fe47077b3e77\") " pod="openstack/ceilometer-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.496307 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bff391e4-6d41-4a89-aeb6-fe47077b3e77-config-data\") pod \"ceilometer-0\" (UID: \"bff391e4-6d41-4a89-aeb6-fe47077b3e77\") " pod="openstack/ceilometer-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.496327 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4tmpl\" (UniqueName: \"kubernetes.io/projected/bff391e4-6d41-4a89-aeb6-fe47077b3e77-kube-api-access-4tmpl\") pod \"ceilometer-0\" (UID: \"bff391e4-6d41-4a89-aeb6-fe47077b3e77\") " pod="openstack/ceilometer-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.496350 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bff391e4-6d41-4a89-aeb6-fe47077b3e77-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"bff391e4-6d41-4a89-aeb6-fe47077b3e77\") " pod="openstack/ceilometer-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.496381 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bff391e4-6d41-4a89-aeb6-fe47077b3e77-log-httpd\") pod \"ceilometer-0\" (UID: \"bff391e4-6d41-4a89-aeb6-fe47077b3e77\") " pod="openstack/ceilometer-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.496481 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ab7198f-769d-49fc-9b8d-cf5825a15018-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"1ab7198f-769d-49fc-9b8d-cf5825a15018\") " pod="openstack/nova-scheduler-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.496519 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bff391e4-6d41-4a89-aeb6-fe47077b3e77-run-httpd\") pod \"ceilometer-0\" (UID: 
\"bff391e4-6d41-4a89-aeb6-fe47077b3e77\") " pod="openstack/ceilometer-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.496572 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n78cq\" (UniqueName: \"kubernetes.io/projected/1ab7198f-769d-49fc-9b8d-cf5825a15018-kube-api-access-n78cq\") pod \"nova-scheduler-0\" (UID: \"1ab7198f-769d-49fc-9b8d-cf5825a15018\") " pod="openstack/nova-scheduler-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.499263 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.500931 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.505812 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bff391e4-6d41-4a89-aeb6-fe47077b3e77-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"bff391e4-6d41-4a89-aeb6-fe47077b3e77\") " pod="openstack/ceilometer-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.506371 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.507831 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bff391e4-6d41-4a89-aeb6-fe47077b3e77-log-httpd\") pod \"ceilometer-0\" (UID: \"bff391e4-6d41-4a89-aeb6-fe47077b3e77\") " pod="openstack/ceilometer-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.514121 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bff391e4-6d41-4a89-aeb6-fe47077b3e77-run-httpd\") pod \"ceilometer-0\" (UID: \"bff391e4-6d41-4a89-aeb6-fe47077b3e77\") " pod="openstack/ceilometer-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.516452 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bff391e4-6d41-4a89-aeb6-fe47077b3e77-scripts\") pod \"ceilometer-0\" (UID: \"bff391e4-6d41-4a89-aeb6-fe47077b3e77\") " pod="openstack/ceilometer-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.518662 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bff391e4-6d41-4a89-aeb6-fe47077b3e77-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"bff391e4-6d41-4a89-aeb6-fe47077b3e77\") " pod="openstack/ceilometer-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.525682 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bff391e4-6d41-4a89-aeb6-fe47077b3e77-config-data\") pod \"ceilometer-0\" (UID: \"bff391e4-6d41-4a89-aeb6-fe47077b3e77\") " pod="openstack/ceilometer-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.538265 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.540371 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.545050 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4tmpl\" (UniqueName: \"kubernetes.io/projected/bff391e4-6d41-4a89-aeb6-fe47077b3e77-kube-api-access-4tmpl\") pod \"ceilometer-0\" (UID: \"bff391e4-6d41-4a89-aeb6-fe47077b3e77\") " pod="openstack/ceilometer-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.545641 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.589999 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-9qvf2" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.632243 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ab7198f-769d-49fc-9b8d-cf5825a15018-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"1ab7198f-769d-49fc-9b8d-cf5825a15018\") " pod="openstack/nova-scheduler-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.632405 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73bd63f0-a2ad-4433-b680-8628c737531c-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"73bd63f0-a2ad-4433-b680-8628c737531c\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.632497 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73bd63f0-a2ad-4433-b680-8628c737531c-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"73bd63f0-a2ad-4433-b680-8628c737531c\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.632535 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n78cq\" (UniqueName: \"kubernetes.io/projected/1ab7198f-769d-49fc-9b8d-cf5825a15018-kube-api-access-n78cq\") pod \"nova-scheduler-0\" (UID: \"1ab7198f-769d-49fc-9b8d-cf5825a15018\") " pod="openstack/nova-scheduler-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.632645 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5z9kt\" (UniqueName: \"kubernetes.io/projected/73bd63f0-a2ad-4433-b680-8628c737531c-kube-api-access-5z9kt\") pod \"nova-cell1-novncproxy-0\" (UID: \"73bd63f0-a2ad-4433-b680-8628c737531c\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.632817 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ab7198f-769d-49fc-9b8d-cf5825a15018-config-data\") pod \"nova-scheduler-0\" (UID: \"1ab7198f-769d-49fc-9b8d-cf5825a15018\") " pod="openstack/nova-scheduler-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.642139 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.648836 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ab7198f-769d-49fc-9b8d-cf5825a15018-config-data\") pod \"nova-scheduler-0\" (UID: \"1ab7198f-769d-49fc-9b8d-cf5825a15018\") " pod="openstack/nova-scheduler-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.649381 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ab7198f-769d-49fc-9b8d-cf5825a15018-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"1ab7198f-769d-49fc-9b8d-cf5825a15018\") " pod="openstack/nova-scheduler-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.673445 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.699638 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n78cq\" (UniqueName: \"kubernetes.io/projected/1ab7198f-769d-49fc-9b8d-cf5825a15018-kube-api-access-n78cq\") pod \"nova-scheduler-0\" (UID: \"1ab7198f-769d-49fc-9b8d-cf5825a15018\") " pod="openstack/nova-scheduler-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.711708 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.744171 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.745921 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.748183 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.750569 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73bd63f0-a2ad-4433-b680-8628c737531c-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"73bd63f0-a2ad-4433-b680-8628c737531c\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.750817 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73bd63f0-a2ad-4433-b680-8628c737531c-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"73bd63f0-a2ad-4433-b680-8628c737531c\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.750897 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4a91d833-04c3-4fec-b2dc-8b8072c154d3-logs\") pod \"nova-metadata-0\" (UID: \"4a91d833-04c3-4fec-b2dc-8b8072c154d3\") " pod="openstack/nova-metadata-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.750938 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5z9kt\" (UniqueName: \"kubernetes.io/projected/73bd63f0-a2ad-4433-b680-8628c737531c-kube-api-access-5z9kt\") pod \"nova-cell1-novncproxy-0\" (UID: \"73bd63f0-a2ad-4433-b680-8628c737531c\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.750991 4784 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a91d833-04c3-4fec-b2dc-8b8072c154d3-config-data\") pod \"nova-metadata-0\" (UID: \"4a91d833-04c3-4fec-b2dc-8b8072c154d3\") " pod="openstack/nova-metadata-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.751048 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a91d833-04c3-4fec-b2dc-8b8072c154d3-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4a91d833-04c3-4fec-b2dc-8b8072c154d3\") " pod="openstack/nova-metadata-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.751104 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jgdp8\" (UniqueName: \"kubernetes.io/projected/4a91d833-04c3-4fec-b2dc-8b8072c154d3-kube-api-access-jgdp8\") pod \"nova-metadata-0\" (UID: \"4a91d833-04c3-4fec-b2dc-8b8072c154d3\") " pod="openstack/nova-metadata-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.767781 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73bd63f0-a2ad-4433-b680-8628c737531c-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"73bd63f0-a2ad-4433-b680-8628c737531c\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.773463 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73bd63f0-a2ad-4433-b680-8628c737531c-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"73bd63f0-a2ad-4433-b680-8628c737531c\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.776484 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5z9kt\" (UniqueName: \"kubernetes.io/projected/73bd63f0-a2ad-4433-b680-8628c737531c-kube-api-access-5z9kt\") pod \"nova-cell1-novncproxy-0\" (UID: \"73bd63f0-a2ad-4433-b680-8628c737531c\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.789132 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.833956 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7998f99df9-vq7lx"] Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.836086 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7998f99df9-vq7lx" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.854565 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec225a4a-0751-4625-9540-086443e2a9a7-logs\") pod \"nova-api-0\" (UID: \"ec225a4a-0751-4625-9540-086443e2a9a7\") " pod="openstack/nova-api-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.854765 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec225a4a-0751-4625-9540-086443e2a9a7-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ec225a4a-0751-4625-9540-086443e2a9a7\") " pod="openstack/nova-api-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.854814 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec225a4a-0751-4625-9540-086443e2a9a7-config-data\") pod \"nova-api-0\" (UID: \"ec225a4a-0751-4625-9540-086443e2a9a7\") " pod="openstack/nova-api-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.854842 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4a91d833-04c3-4fec-b2dc-8b8072c154d3-logs\") pod \"nova-metadata-0\" (UID: \"4a91d833-04c3-4fec-b2dc-8b8072c154d3\") " pod="openstack/nova-metadata-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.854882 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a91d833-04c3-4fec-b2dc-8b8072c154d3-config-data\") pod \"nova-metadata-0\" (UID: \"4a91d833-04c3-4fec-b2dc-8b8072c154d3\") " pod="openstack/nova-metadata-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.854908 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tr4rk\" (UniqueName: \"kubernetes.io/projected/ec225a4a-0751-4625-9540-086443e2a9a7-kube-api-access-tr4rk\") pod \"nova-api-0\" (UID: \"ec225a4a-0751-4625-9540-086443e2a9a7\") " pod="openstack/nova-api-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.854959 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a91d833-04c3-4fec-b2dc-8b8072c154d3-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4a91d833-04c3-4fec-b2dc-8b8072c154d3\") " pod="openstack/nova-metadata-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.854992 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jgdp8\" (UniqueName: \"kubernetes.io/projected/4a91d833-04c3-4fec-b2dc-8b8072c154d3-kube-api-access-jgdp8\") pod \"nova-metadata-0\" (UID: \"4a91d833-04c3-4fec-b2dc-8b8072c154d3\") " pod="openstack/nova-metadata-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.858245 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4a91d833-04c3-4fec-b2dc-8b8072c154d3-logs\") pod \"nova-metadata-0\" (UID: \"4a91d833-04c3-4fec-b2dc-8b8072c154d3\") " pod="openstack/nova-metadata-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.863920 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7998f99df9-vq7lx"] Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.880352 
4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a91d833-04c3-4fec-b2dc-8b8072c154d3-config-data\") pod \"nova-metadata-0\" (UID: \"4a91d833-04c3-4fec-b2dc-8b8072c154d3\") " pod="openstack/nova-metadata-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.891559 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a91d833-04c3-4fec-b2dc-8b8072c154d3-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4a91d833-04c3-4fec-b2dc-8b8072c154d3\") " pod="openstack/nova-metadata-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.891891 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jgdp8\" (UniqueName: \"kubernetes.io/projected/4a91d833-04c3-4fec-b2dc-8b8072c154d3-kube-api-access-jgdp8\") pod \"nova-metadata-0\" (UID: \"4a91d833-04c3-4fec-b2dc-8b8072c154d3\") " pod="openstack/nova-metadata-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.909667 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.957283 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/099d447c-80ed-4714-9da1-48d1a35c1079-dns-swift-storage-0\") pod \"dnsmasq-dns-7998f99df9-vq7lx\" (UID: \"099d447c-80ed-4714-9da1-48d1a35c1079\") " pod="openstack/dnsmasq-dns-7998f99df9-vq7lx" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.957325 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/099d447c-80ed-4714-9da1-48d1a35c1079-ovsdbserver-sb\") pod \"dnsmasq-dns-7998f99df9-vq7lx\" (UID: \"099d447c-80ed-4714-9da1-48d1a35c1079\") " pod="openstack/dnsmasq-dns-7998f99df9-vq7lx" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.957369 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec225a4a-0751-4625-9540-086443e2a9a7-logs\") pod \"nova-api-0\" (UID: \"ec225a4a-0751-4625-9540-086443e2a9a7\") " pod="openstack/nova-api-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.957396 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8pt8j\" (UniqueName: \"kubernetes.io/projected/099d447c-80ed-4714-9da1-48d1a35c1079-kube-api-access-8pt8j\") pod \"dnsmasq-dns-7998f99df9-vq7lx\" (UID: \"099d447c-80ed-4714-9da1-48d1a35c1079\") " pod="openstack/dnsmasq-dns-7998f99df9-vq7lx" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.957426 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec225a4a-0751-4625-9540-086443e2a9a7-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ec225a4a-0751-4625-9540-086443e2a9a7\") " pod="openstack/nova-api-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.957450 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec225a4a-0751-4625-9540-086443e2a9a7-config-data\") pod \"nova-api-0\" (UID: \"ec225a4a-0751-4625-9540-086443e2a9a7\") " pod="openstack/nova-api-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.957470 4784 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/099d447c-80ed-4714-9da1-48d1a35c1079-dns-svc\") pod \"dnsmasq-dns-7998f99df9-vq7lx\" (UID: \"099d447c-80ed-4714-9da1-48d1a35c1079\") " pod="openstack/dnsmasq-dns-7998f99df9-vq7lx" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.957497 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/099d447c-80ed-4714-9da1-48d1a35c1079-ovsdbserver-nb\") pod \"dnsmasq-dns-7998f99df9-vq7lx\" (UID: \"099d447c-80ed-4714-9da1-48d1a35c1079\") " pod="openstack/dnsmasq-dns-7998f99df9-vq7lx" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.957515 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tr4rk\" (UniqueName: \"kubernetes.io/projected/ec225a4a-0751-4625-9540-086443e2a9a7-kube-api-access-tr4rk\") pod \"nova-api-0\" (UID: \"ec225a4a-0751-4625-9540-086443e2a9a7\") " pod="openstack/nova-api-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.957558 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/099d447c-80ed-4714-9da1-48d1a35c1079-config\") pod \"dnsmasq-dns-7998f99df9-vq7lx\" (UID: \"099d447c-80ed-4714-9da1-48d1a35c1079\") " pod="openstack/dnsmasq-dns-7998f99df9-vq7lx" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.957969 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec225a4a-0751-4625-9540-086443e2a9a7-logs\") pod \"nova-api-0\" (UID: \"ec225a4a-0751-4625-9540-086443e2a9a7\") " pod="openstack/nova-api-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.966505 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.968124 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec225a4a-0751-4625-9540-086443e2a9a7-config-data\") pod \"nova-api-0\" (UID: \"ec225a4a-0751-4625-9540-086443e2a9a7\") " pod="openstack/nova-api-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.968456 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec225a4a-0751-4625-9540-086443e2a9a7-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ec225a4a-0751-4625-9540-086443e2a9a7\") " pod="openstack/nova-api-0" Dec 05 12:48:11 crc kubenswrapper[4784]: I1205 12:48:11.978364 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 12:48:12 crc kubenswrapper[4784]: I1205 12:48:11.999777 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tr4rk\" (UniqueName: \"kubernetes.io/projected/ec225a4a-0751-4625-9540-086443e2a9a7-kube-api-access-tr4rk\") pod \"nova-api-0\" (UID: \"ec225a4a-0751-4625-9540-086443e2a9a7\") " pod="openstack/nova-api-0" Dec 05 12:48:12 crc kubenswrapper[4784]: I1205 12:48:12.059616 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/099d447c-80ed-4714-9da1-48d1a35c1079-dns-svc\") pod \"dnsmasq-dns-7998f99df9-vq7lx\" (UID: \"099d447c-80ed-4714-9da1-48d1a35c1079\") " pod="openstack/dnsmasq-dns-7998f99df9-vq7lx" Dec 05 12:48:12 crc kubenswrapper[4784]: I1205 12:48:12.059669 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/099d447c-80ed-4714-9da1-48d1a35c1079-ovsdbserver-nb\") pod \"dnsmasq-dns-7998f99df9-vq7lx\" (UID: \"099d447c-80ed-4714-9da1-48d1a35c1079\") " pod="openstack/dnsmasq-dns-7998f99df9-vq7lx" Dec 05 12:48:12 crc kubenswrapper[4784]: I1205 12:48:12.059721 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/099d447c-80ed-4714-9da1-48d1a35c1079-config\") pod \"dnsmasq-dns-7998f99df9-vq7lx\" (UID: \"099d447c-80ed-4714-9da1-48d1a35c1079\") " pod="openstack/dnsmasq-dns-7998f99df9-vq7lx" Dec 05 12:48:12 crc kubenswrapper[4784]: I1205 12:48:12.059791 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/099d447c-80ed-4714-9da1-48d1a35c1079-dns-swift-storage-0\") pod \"dnsmasq-dns-7998f99df9-vq7lx\" (UID: \"099d447c-80ed-4714-9da1-48d1a35c1079\") " pod="openstack/dnsmasq-dns-7998f99df9-vq7lx" Dec 05 12:48:12 crc kubenswrapper[4784]: I1205 12:48:12.059814 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/099d447c-80ed-4714-9da1-48d1a35c1079-ovsdbserver-sb\") pod \"dnsmasq-dns-7998f99df9-vq7lx\" (UID: \"099d447c-80ed-4714-9da1-48d1a35c1079\") " pod="openstack/dnsmasq-dns-7998f99df9-vq7lx" Dec 05 12:48:12 crc kubenswrapper[4784]: I1205 12:48:12.059853 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8pt8j\" (UniqueName: \"kubernetes.io/projected/099d447c-80ed-4714-9da1-48d1a35c1079-kube-api-access-8pt8j\") pod \"dnsmasq-dns-7998f99df9-vq7lx\" (UID: \"099d447c-80ed-4714-9da1-48d1a35c1079\") " pod="openstack/dnsmasq-dns-7998f99df9-vq7lx" Dec 05 12:48:12 crc kubenswrapper[4784]: I1205 12:48:12.060964 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/099d447c-80ed-4714-9da1-48d1a35c1079-dns-svc\") pod \"dnsmasq-dns-7998f99df9-vq7lx\" (UID: \"099d447c-80ed-4714-9da1-48d1a35c1079\") " pod="openstack/dnsmasq-dns-7998f99df9-vq7lx" Dec 05 12:48:12 crc kubenswrapper[4784]: I1205 12:48:12.061492 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/099d447c-80ed-4714-9da1-48d1a35c1079-ovsdbserver-nb\") pod \"dnsmasq-dns-7998f99df9-vq7lx\" (UID: \"099d447c-80ed-4714-9da1-48d1a35c1079\") " pod="openstack/dnsmasq-dns-7998f99df9-vq7lx" Dec 05 12:48:12 crc kubenswrapper[4784]: I1205 12:48:12.061953 
4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/099d447c-80ed-4714-9da1-48d1a35c1079-config\") pod \"dnsmasq-dns-7998f99df9-vq7lx\" (UID: \"099d447c-80ed-4714-9da1-48d1a35c1079\") " pod="openstack/dnsmasq-dns-7998f99df9-vq7lx" Dec 05 12:48:12 crc kubenswrapper[4784]: I1205 12:48:12.063025 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/099d447c-80ed-4714-9da1-48d1a35c1079-dns-swift-storage-0\") pod \"dnsmasq-dns-7998f99df9-vq7lx\" (UID: \"099d447c-80ed-4714-9da1-48d1a35c1079\") " pod="openstack/dnsmasq-dns-7998f99df9-vq7lx" Dec 05 12:48:12 crc kubenswrapper[4784]: I1205 12:48:12.063559 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/099d447c-80ed-4714-9da1-48d1a35c1079-ovsdbserver-sb\") pod \"dnsmasq-dns-7998f99df9-vq7lx\" (UID: \"099d447c-80ed-4714-9da1-48d1a35c1079\") " pod="openstack/dnsmasq-dns-7998f99df9-vq7lx" Dec 05 12:48:12 crc kubenswrapper[4784]: I1205 12:48:12.064827 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 12:48:12 crc kubenswrapper[4784]: I1205 12:48:12.091280 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8pt8j\" (UniqueName: \"kubernetes.io/projected/099d447c-80ed-4714-9da1-48d1a35c1079-kube-api-access-8pt8j\") pod \"dnsmasq-dns-7998f99df9-vq7lx\" (UID: \"099d447c-80ed-4714-9da1-48d1a35c1079\") " pod="openstack/dnsmasq-dns-7998f99df9-vq7lx" Dec 05 12:48:12 crc kubenswrapper[4784]: I1205 12:48:12.373644 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7998f99df9-vq7lx" Dec 05 12:48:12 crc kubenswrapper[4784]: I1205 12:48:12.405412 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:48:12 crc kubenswrapper[4784]: I1205 12:48:12.589926 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-69577"] Dec 05 12:48:12 crc kubenswrapper[4784]: I1205 12:48:12.591328 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-69577" Dec 05 12:48:12 crc kubenswrapper[4784]: I1205 12:48:12.596259 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 05 12:48:12 crc kubenswrapper[4784]: I1205 12:48:12.596324 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Dec 05 12:48:12 crc kubenswrapper[4784]: I1205 12:48:12.619463 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-69577"] Dec 05 12:48:12 crc kubenswrapper[4784]: W1205 12:48:12.641991 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod31e25db3_7d9e_43b2_8e5a_b6956be5114e.slice/crio-27bf1b2fffec005d7b357054b75c46d23d81b8f80ca8d8aa43aebba3e70839b6 WatchSource:0}: Error finding container 27bf1b2fffec005d7b357054b75c46d23d81b8f80ca8d8aa43aebba3e70839b6: Status 404 returned error can't find the container with id 27bf1b2fffec005d7b357054b75c46d23d81b8f80ca8d8aa43aebba3e70839b6 Dec 05 12:48:12 crc kubenswrapper[4784]: I1205 12:48:12.655495 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-9qvf2"] Dec 05 12:48:12 crc kubenswrapper[4784]: I1205 12:48:12.681689 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mscbw\" (UniqueName: \"kubernetes.io/projected/e7b90226-6cca-424b-9a49-d2f1bf8c289f-kube-api-access-mscbw\") pod \"nova-cell1-conductor-db-sync-69577\" (UID: \"e7b90226-6cca-424b-9a49-d2f1bf8c289f\") " pod="openstack/nova-cell1-conductor-db-sync-69577" Dec 05 12:48:12 crc kubenswrapper[4784]: I1205 12:48:12.681745 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7b90226-6cca-424b-9a49-d2f1bf8c289f-config-data\") pod \"nova-cell1-conductor-db-sync-69577\" (UID: \"e7b90226-6cca-424b-9a49-d2f1bf8c289f\") " pod="openstack/nova-cell1-conductor-db-sync-69577" Dec 05 12:48:12 crc kubenswrapper[4784]: I1205 12:48:12.681762 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e7b90226-6cca-424b-9a49-d2f1bf8c289f-scripts\") pod \"nova-cell1-conductor-db-sync-69577\" (UID: \"e7b90226-6cca-424b-9a49-d2f1bf8c289f\") " pod="openstack/nova-cell1-conductor-db-sync-69577" Dec 05 12:48:12 crc kubenswrapper[4784]: I1205 12:48:12.681948 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7b90226-6cca-424b-9a49-d2f1bf8c289f-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-69577\" (UID: \"e7b90226-6cca-424b-9a49-d2f1bf8c289f\") " pod="openstack/nova-cell1-conductor-db-sync-69577" Dec 05 12:48:12 crc kubenswrapper[4784]: I1205 12:48:12.783762 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mscbw\" (UniqueName: \"kubernetes.io/projected/e7b90226-6cca-424b-9a49-d2f1bf8c289f-kube-api-access-mscbw\") pod \"nova-cell1-conductor-db-sync-69577\" (UID: \"e7b90226-6cca-424b-9a49-d2f1bf8c289f\") " pod="openstack/nova-cell1-conductor-db-sync-69577" Dec 05 12:48:12 crc kubenswrapper[4784]: I1205 12:48:12.784137 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/e7b90226-6cca-424b-9a49-d2f1bf8c289f-config-data\") pod \"nova-cell1-conductor-db-sync-69577\" (UID: \"e7b90226-6cca-424b-9a49-d2f1bf8c289f\") " pod="openstack/nova-cell1-conductor-db-sync-69577" Dec 05 12:48:12 crc kubenswrapper[4784]: I1205 12:48:12.784157 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e7b90226-6cca-424b-9a49-d2f1bf8c289f-scripts\") pod \"nova-cell1-conductor-db-sync-69577\" (UID: \"e7b90226-6cca-424b-9a49-d2f1bf8c289f\") " pod="openstack/nova-cell1-conductor-db-sync-69577" Dec 05 12:48:12 crc kubenswrapper[4784]: I1205 12:48:12.784356 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7b90226-6cca-424b-9a49-d2f1bf8c289f-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-69577\" (UID: \"e7b90226-6cca-424b-9a49-d2f1bf8c289f\") " pod="openstack/nova-cell1-conductor-db-sync-69577" Dec 05 12:48:12 crc kubenswrapper[4784]: I1205 12:48:12.791574 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 12:48:12 crc kubenswrapper[4784]: I1205 12:48:12.806829 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7b90226-6cca-424b-9a49-d2f1bf8c289f-config-data\") pod \"nova-cell1-conductor-db-sync-69577\" (UID: \"e7b90226-6cca-424b-9a49-d2f1bf8c289f\") " pod="openstack/nova-cell1-conductor-db-sync-69577" Dec 05 12:48:12 crc kubenswrapper[4784]: I1205 12:48:12.809473 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7b90226-6cca-424b-9a49-d2f1bf8c289f-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-69577\" (UID: \"e7b90226-6cca-424b-9a49-d2f1bf8c289f\") " pod="openstack/nova-cell1-conductor-db-sync-69577" Dec 05 12:48:12 crc kubenswrapper[4784]: I1205 12:48:12.812093 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mscbw\" (UniqueName: \"kubernetes.io/projected/e7b90226-6cca-424b-9a49-d2f1bf8c289f-kube-api-access-mscbw\") pod \"nova-cell1-conductor-db-sync-69577\" (UID: \"e7b90226-6cca-424b-9a49-d2f1bf8c289f\") " pod="openstack/nova-cell1-conductor-db-sync-69577" Dec 05 12:48:12 crc kubenswrapper[4784]: I1205 12:48:12.812508 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e7b90226-6cca-424b-9a49-d2f1bf8c289f-scripts\") pod \"nova-cell1-conductor-db-sync-69577\" (UID: \"e7b90226-6cca-424b-9a49-d2f1bf8c289f\") " pod="openstack/nova-cell1-conductor-db-sync-69577" Dec 05 12:48:12 crc kubenswrapper[4784]: I1205 12:48:12.819598 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 12:48:12 crc kubenswrapper[4784]: W1205 12:48:12.829415 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod73bd63f0_a2ad_4433_b680_8628c737531c.slice/crio-79af199205285e0888228568956dd37ea494668039a2951d06a4d2e464e9c4b1 WatchSource:0}: Error finding container 79af199205285e0888228568956dd37ea494668039a2951d06a4d2e464e9c4b1: Status 404 returned error can't find the container with id 79af199205285e0888228568956dd37ea494668039a2951d06a4d2e464e9c4b1 Dec 05 12:48:12 crc kubenswrapper[4784]: I1205 12:48:12.841921 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/nova-metadata-0"] Dec 05 12:48:12 crc kubenswrapper[4784]: I1205 12:48:12.915769 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-69577" Dec 05 12:48:13 crc kubenswrapper[4784]: I1205 12:48:13.042381 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ebb17583-807d-46d7-a044-770a5df47767" path="/var/lib/kubelet/pods/ebb17583-807d-46d7-a044-770a5df47767/volumes" Dec 05 12:48:13 crc kubenswrapper[4784]: I1205 12:48:13.043118 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 12:48:13 crc kubenswrapper[4784]: W1205 12:48:13.048207 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podec225a4a_0751_4625_9540_086443e2a9a7.slice/crio-130eaf808c9d31c4f9c3d793d4f79a238ea77af7e0564839856bd1361f481754 WatchSource:0}: Error finding container 130eaf808c9d31c4f9c3d793d4f79a238ea77af7e0564839856bd1361f481754: Status 404 returned error can't find the container with id 130eaf808c9d31c4f9c3d793d4f79a238ea77af7e0564839856bd1361f481754 Dec 05 12:48:13 crc kubenswrapper[4784]: I1205 12:48:13.181135 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7998f99df9-vq7lx"] Dec 05 12:48:13 crc kubenswrapper[4784]: I1205 12:48:13.282709 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-9qvf2" event={"ID":"31e25db3-7d9e-43b2-8e5a-b6956be5114e","Type":"ContainerStarted","Data":"36e33246f62869ea234612af8ba86e05fad0a2809ea7858bd8d9d4f14dc6facf"} Dec 05 12:48:13 crc kubenswrapper[4784]: I1205 12:48:13.282748 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-9qvf2" event={"ID":"31e25db3-7d9e-43b2-8e5a-b6956be5114e","Type":"ContainerStarted","Data":"27bf1b2fffec005d7b357054b75c46d23d81b8f80ca8d8aa43aebba3e70839b6"} Dec 05 12:48:13 crc kubenswrapper[4784]: I1205 12:48:13.288816 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7998f99df9-vq7lx" event={"ID":"099d447c-80ed-4714-9da1-48d1a35c1079","Type":"ContainerStarted","Data":"c060b043e24c416d0b251bf5d8e99b71bc49c5b3b570c2886c9b700f2bcf56cb"} Dec 05 12:48:13 crc kubenswrapper[4784]: I1205 12:48:13.297507 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"73bd63f0-a2ad-4433-b680-8628c737531c","Type":"ContainerStarted","Data":"79af199205285e0888228568956dd37ea494668039a2951d06a4d2e464e9c4b1"} Dec 05 12:48:13 crc kubenswrapper[4784]: I1205 12:48:13.324156 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-9qvf2" podStartSLOduration=2.324131829 podStartE2EDuration="2.324131829s" podCreationTimestamp="2025-12-05 12:48:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:48:13.307359185 +0000 UTC m=+1372.727426000" watchObservedRunningTime="2025-12-05 12:48:13.324131829 +0000 UTC m=+1372.744198644" Dec 05 12:48:13 crc kubenswrapper[4784]: I1205 12:48:13.343629 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bff391e4-6d41-4a89-aeb6-fe47077b3e77","Type":"ContainerStarted","Data":"dc78c3a71e913a56cb13935b20f237e953dd750cbfd33ae59a33b6f36656db34"} Dec 05 12:48:13 crc kubenswrapper[4784]: I1205 12:48:13.343671 4784 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bff391e4-6d41-4a89-aeb6-fe47077b3e77","Type":"ContainerStarted","Data":"5e3775fa0a50e49ed01ef79aedf1c3e05afa6aa82da770a7f6439c289e6579ba"} Dec 05 12:48:13 crc kubenswrapper[4784]: I1205 12:48:13.346362 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ec225a4a-0751-4625-9540-086443e2a9a7","Type":"ContainerStarted","Data":"130eaf808c9d31c4f9c3d793d4f79a238ea77af7e0564839856bd1361f481754"} Dec 05 12:48:13 crc kubenswrapper[4784]: I1205 12:48:13.358298 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4a91d833-04c3-4fec-b2dc-8b8072c154d3","Type":"ContainerStarted","Data":"fae03d572f756b9caf1894e359c7c70a78d2caa82f8cb28cbd2aa225bf47d473"} Dec 05 12:48:13 crc kubenswrapper[4784]: I1205 12:48:13.360121 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1ab7198f-769d-49fc-9b8d-cf5825a15018","Type":"ContainerStarted","Data":"12e2a168a59b632b9b06e46f5689f095e9e8cec4d9a9d0582d77945ee09c1325"} Dec 05 12:48:13 crc kubenswrapper[4784]: I1205 12:48:13.530119 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-69577"] Dec 05 12:48:14 crc kubenswrapper[4784]: I1205 12:48:14.392519 4784 generic.go:334] "Generic (PLEG): container finished" podID="099d447c-80ed-4714-9da1-48d1a35c1079" containerID="308d26b709675ad4489e290b7c1835596ce190919800ae6b87742129a0bae82d" exitCode=0 Dec 05 12:48:14 crc kubenswrapper[4784]: I1205 12:48:14.393140 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7998f99df9-vq7lx" event={"ID":"099d447c-80ed-4714-9da1-48d1a35c1079","Type":"ContainerDied","Data":"308d26b709675ad4489e290b7c1835596ce190919800ae6b87742129a0bae82d"} Dec 05 12:48:14 crc kubenswrapper[4784]: I1205 12:48:14.398423 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bff391e4-6d41-4a89-aeb6-fe47077b3e77","Type":"ContainerStarted","Data":"fcb8005fcd8e1d1d4ab6bf26b5ba897627071354e0ecee0f519975f589ce6e5d"} Dec 05 12:48:14 crc kubenswrapper[4784]: I1205 12:48:14.398463 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bff391e4-6d41-4a89-aeb6-fe47077b3e77","Type":"ContainerStarted","Data":"9cf1ac959726a2b0e339e1e33fabb23826b9696f96f5b8c611be596efce71c66"} Dec 05 12:48:14 crc kubenswrapper[4784]: I1205 12:48:14.408266 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-69577" event={"ID":"e7b90226-6cca-424b-9a49-d2f1bf8c289f","Type":"ContainerStarted","Data":"0f35c6424f83e93161e92f8c310874f70e2e61e59304c6f30ed83bb875f0de4b"} Dec 05 12:48:14 crc kubenswrapper[4784]: I1205 12:48:14.408299 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-69577" event={"ID":"e7b90226-6cca-424b-9a49-d2f1bf8c289f","Type":"ContainerStarted","Data":"67cab04da162255c0fecc99ad375061407d2b299fa1072729a38505117116111"} Dec 05 12:48:14 crc kubenswrapper[4784]: I1205 12:48:14.447852 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-69577" podStartSLOduration=2.447829631 podStartE2EDuration="2.447829631s" podCreationTimestamp="2025-12-05 12:48:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:48:14.444461027 +0000 
UTC m=+1373.864527842" watchObservedRunningTime="2025-12-05 12:48:14.447829631 +0000 UTC m=+1373.867896446" Dec 05 12:48:15 crc kubenswrapper[4784]: I1205 12:48:15.424669 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7998f99df9-vq7lx" event={"ID":"099d447c-80ed-4714-9da1-48d1a35c1079","Type":"ContainerStarted","Data":"2e617565b3792a54c876383e7f6ff5c9114998fc38d0f222dbd35e548d33cbf8"} Dec 05 12:48:15 crc kubenswrapper[4784]: I1205 12:48:15.425408 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7998f99df9-vq7lx" Dec 05 12:48:15 crc kubenswrapper[4784]: I1205 12:48:15.438619 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 12:48:15 crc kubenswrapper[4784]: I1205 12:48:15.448225 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 12:48:15 crc kubenswrapper[4784]: I1205 12:48:15.456601 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7998f99df9-vq7lx" podStartSLOduration=4.456577284 podStartE2EDuration="4.456577284s" podCreationTimestamp="2025-12-05 12:48:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:48:15.449339579 +0000 UTC m=+1374.869406404" watchObservedRunningTime="2025-12-05 12:48:15.456577284 +0000 UTC m=+1374.876644099" Dec 05 12:48:18 crc kubenswrapper[4784]: I1205 12:48:18.513752 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4a91d833-04c3-4fec-b2dc-8b8072c154d3","Type":"ContainerStarted","Data":"b9166665d0db1318f56e34d627f8d703a2eb53a819785168d805aa73d45a1f02"} Dec 05 12:48:18 crc kubenswrapper[4784]: I1205 12:48:18.514392 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4a91d833-04c3-4fec-b2dc-8b8072c154d3","Type":"ContainerStarted","Data":"1daae9961d2636a72eacfc11c4dd802c18e27b1885e3765a50fb0a205c9e1d12"} Dec 05 12:48:18 crc kubenswrapper[4784]: I1205 12:48:18.514490 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="4a91d833-04c3-4fec-b2dc-8b8072c154d3" containerName="nova-metadata-log" containerID="cri-o://1daae9961d2636a72eacfc11c4dd802c18e27b1885e3765a50fb0a205c9e1d12" gracePeriod=30 Dec 05 12:48:18 crc kubenswrapper[4784]: I1205 12:48:18.514496 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="4a91d833-04c3-4fec-b2dc-8b8072c154d3" containerName="nova-metadata-metadata" containerID="cri-o://b9166665d0db1318f56e34d627f8d703a2eb53a819785168d805aa73d45a1f02" gracePeriod=30 Dec 05 12:48:18 crc kubenswrapper[4784]: I1205 12:48:18.529826 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1ab7198f-769d-49fc-9b8d-cf5825a15018","Type":"ContainerStarted","Data":"0a80545960e1f69c8912f9c3b29a3e0f1d8fe17acd09ee2977e4bc984c4fa857"} Dec 05 12:48:18 crc kubenswrapper[4784]: I1205 12:48:18.533658 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"73bd63f0-a2ad-4433-b680-8628c737531c","Type":"ContainerStarted","Data":"6934a0f2f73c3ac881bf5c89e07018e75a7fd91d74c96648f990ee733eb5c4ac"} Dec 05 12:48:18 crc kubenswrapper[4784]: I1205 12:48:18.533818 4784 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/nova-cell1-novncproxy-0" podUID="73bd63f0-a2ad-4433-b680-8628c737531c" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://6934a0f2f73c3ac881bf5c89e07018e75a7fd91d74c96648f990ee733eb5c4ac" gracePeriod=30 Dec 05 12:48:18 crc kubenswrapper[4784]: I1205 12:48:18.547063 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bff391e4-6d41-4a89-aeb6-fe47077b3e77","Type":"ContainerStarted","Data":"1398dfd10d2ba4915fc7d890d7adb83c27d104460ae6a362707ca218b5665a03"} Dec 05 12:48:18 crc kubenswrapper[4784]: I1205 12:48:18.547282 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 12:48:18 crc kubenswrapper[4784]: I1205 12:48:18.552983 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.734150883 podStartE2EDuration="7.552965883s" podCreationTimestamp="2025-12-05 12:48:11 +0000 UTC" firstStartedPulling="2025-12-05 12:48:12.874571459 +0000 UTC m=+1372.294638274" lastFinishedPulling="2025-12-05 12:48:17.693386459 +0000 UTC m=+1377.113453274" observedRunningTime="2025-12-05 12:48:18.542402904 +0000 UTC m=+1377.962469739" watchObservedRunningTime="2025-12-05 12:48:18.552965883 +0000 UTC m=+1377.973032698" Dec 05 12:48:18 crc kubenswrapper[4784]: I1205 12:48:18.553953 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ec225a4a-0751-4625-9540-086443e2a9a7","Type":"ContainerStarted","Data":"f16bd9d5b32486cc8b7b81ed2382ec407b19bb253b6ab9928f5da9404ca63793"} Dec 05 12:48:18 crc kubenswrapper[4784]: I1205 12:48:18.554071 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ec225a4a-0751-4625-9540-086443e2a9a7","Type":"ContainerStarted","Data":"38bd8b01db6a75d26a460ccbee052d43609508f7fb42398138b511acb7eac580"} Dec 05 12:48:18 crc kubenswrapper[4784]: I1205 12:48:18.563633 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.709934605 podStartE2EDuration="7.563608825s" podCreationTimestamp="2025-12-05 12:48:11 +0000 UTC" firstStartedPulling="2025-12-05 12:48:12.840875106 +0000 UTC m=+1372.260941921" lastFinishedPulling="2025-12-05 12:48:17.694549316 +0000 UTC m=+1377.114616141" observedRunningTime="2025-12-05 12:48:18.558103583 +0000 UTC m=+1377.978170418" watchObservedRunningTime="2025-12-05 12:48:18.563608825 +0000 UTC m=+1377.983675640" Dec 05 12:48:18 crc kubenswrapper[4784]: I1205 12:48:18.588857 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.726315187 podStartE2EDuration="7.588838643s" podCreationTimestamp="2025-12-05 12:48:11 +0000 UTC" firstStartedPulling="2025-12-05 12:48:12.815197164 +0000 UTC m=+1372.235263979" lastFinishedPulling="2025-12-05 12:48:17.67772062 +0000 UTC m=+1377.097787435" observedRunningTime="2025-12-05 12:48:18.573790944 +0000 UTC m=+1377.993857759" watchObservedRunningTime="2025-12-05 12:48:18.588838643 +0000 UTC m=+1378.008905458" Dec 05 12:48:18 crc kubenswrapper[4784]: I1205 12:48:18.609091 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.968467311 podStartE2EDuration="7.609071075s" podCreationTimestamp="2025-12-05 12:48:11 +0000 UTC" firstStartedPulling="2025-12-05 12:48:13.052785475 +0000 UTC m=+1372.472852300" lastFinishedPulling="2025-12-05 12:48:17.693389249 
+0000 UTC m=+1377.113456064" observedRunningTime="2025-12-05 12:48:18.59386096 +0000 UTC m=+1378.013927785" watchObservedRunningTime="2025-12-05 12:48:18.609071075 +0000 UTC m=+1378.029137890" Dec 05 12:48:18 crc kubenswrapper[4784]: I1205 12:48:18.636892 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.358630055 podStartE2EDuration="7.636868863s" podCreationTimestamp="2025-12-05 12:48:11 +0000 UTC" firstStartedPulling="2025-12-05 12:48:12.424250266 +0000 UTC m=+1371.844317081" lastFinishedPulling="2025-12-05 12:48:17.702489074 +0000 UTC m=+1377.122555889" observedRunningTime="2025-12-05 12:48:18.613242025 +0000 UTC m=+1378.033308850" watchObservedRunningTime="2025-12-05 12:48:18.636868863 +0000 UTC m=+1378.056935678" Dec 05 12:48:19 crc kubenswrapper[4784]: I1205 12:48:19.423687 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 12:48:19 crc kubenswrapper[4784]: I1205 12:48:19.533302 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a91d833-04c3-4fec-b2dc-8b8072c154d3-combined-ca-bundle\") pod \"4a91d833-04c3-4fec-b2dc-8b8072c154d3\" (UID: \"4a91d833-04c3-4fec-b2dc-8b8072c154d3\") " Dec 05 12:48:19 crc kubenswrapper[4784]: I1205 12:48:19.534298 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4a91d833-04c3-4fec-b2dc-8b8072c154d3-logs\") pod \"4a91d833-04c3-4fec-b2dc-8b8072c154d3\" (UID: \"4a91d833-04c3-4fec-b2dc-8b8072c154d3\") " Dec 05 12:48:19 crc kubenswrapper[4784]: I1205 12:48:19.534568 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a91d833-04c3-4fec-b2dc-8b8072c154d3-config-data\") pod \"4a91d833-04c3-4fec-b2dc-8b8072c154d3\" (UID: \"4a91d833-04c3-4fec-b2dc-8b8072c154d3\") " Dec 05 12:48:19 crc kubenswrapper[4784]: I1205 12:48:19.534708 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jgdp8\" (UniqueName: \"kubernetes.io/projected/4a91d833-04c3-4fec-b2dc-8b8072c154d3-kube-api-access-jgdp8\") pod \"4a91d833-04c3-4fec-b2dc-8b8072c154d3\" (UID: \"4a91d833-04c3-4fec-b2dc-8b8072c154d3\") " Dec 05 12:48:19 crc kubenswrapper[4784]: I1205 12:48:19.536384 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a91d833-04c3-4fec-b2dc-8b8072c154d3-logs" (OuterVolumeSpecName: "logs") pod "4a91d833-04c3-4fec-b2dc-8b8072c154d3" (UID: "4a91d833-04c3-4fec-b2dc-8b8072c154d3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:48:19 crc kubenswrapper[4784]: I1205 12:48:19.538540 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a91d833-04c3-4fec-b2dc-8b8072c154d3-kube-api-access-jgdp8" (OuterVolumeSpecName: "kube-api-access-jgdp8") pod "4a91d833-04c3-4fec-b2dc-8b8072c154d3" (UID: "4a91d833-04c3-4fec-b2dc-8b8072c154d3"). InnerVolumeSpecName "kube-api-access-jgdp8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:48:19 crc kubenswrapper[4784]: I1205 12:48:19.564345 4784 generic.go:334] "Generic (PLEG): container finished" podID="4a91d833-04c3-4fec-b2dc-8b8072c154d3" containerID="b9166665d0db1318f56e34d627f8d703a2eb53a819785168d805aa73d45a1f02" exitCode=0 Dec 05 12:48:19 crc kubenswrapper[4784]: I1205 12:48:19.564375 4784 generic.go:334] "Generic (PLEG): container finished" podID="4a91d833-04c3-4fec-b2dc-8b8072c154d3" containerID="1daae9961d2636a72eacfc11c4dd802c18e27b1885e3765a50fb0a205c9e1d12" exitCode=143 Dec 05 12:48:19 crc kubenswrapper[4784]: I1205 12:48:19.564415 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 12:48:19 crc kubenswrapper[4784]: I1205 12:48:19.564454 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4a91d833-04c3-4fec-b2dc-8b8072c154d3","Type":"ContainerDied","Data":"b9166665d0db1318f56e34d627f8d703a2eb53a819785168d805aa73d45a1f02"} Dec 05 12:48:19 crc kubenswrapper[4784]: I1205 12:48:19.564480 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4a91d833-04c3-4fec-b2dc-8b8072c154d3","Type":"ContainerDied","Data":"1daae9961d2636a72eacfc11c4dd802c18e27b1885e3765a50fb0a205c9e1d12"} Dec 05 12:48:19 crc kubenswrapper[4784]: I1205 12:48:19.564493 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4a91d833-04c3-4fec-b2dc-8b8072c154d3","Type":"ContainerDied","Data":"fae03d572f756b9caf1894e359c7c70a78d2caa82f8cb28cbd2aa225bf47d473"} Dec 05 12:48:19 crc kubenswrapper[4784]: I1205 12:48:19.564507 4784 scope.go:117] "RemoveContainer" containerID="b9166665d0db1318f56e34d627f8d703a2eb53a819785168d805aa73d45a1f02" Dec 05 12:48:19 crc kubenswrapper[4784]: I1205 12:48:19.571134 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a91d833-04c3-4fec-b2dc-8b8072c154d3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4a91d833-04c3-4fec-b2dc-8b8072c154d3" (UID: "4a91d833-04c3-4fec-b2dc-8b8072c154d3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:48:19 crc kubenswrapper[4784]: I1205 12:48:19.576484 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a91d833-04c3-4fec-b2dc-8b8072c154d3-config-data" (OuterVolumeSpecName: "config-data") pod "4a91d833-04c3-4fec-b2dc-8b8072c154d3" (UID: "4a91d833-04c3-4fec-b2dc-8b8072c154d3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:48:19 crc kubenswrapper[4784]: I1205 12:48:19.600814 4784 scope.go:117] "RemoveContainer" containerID="1daae9961d2636a72eacfc11c4dd802c18e27b1885e3765a50fb0a205c9e1d12" Dec 05 12:48:19 crc kubenswrapper[4784]: I1205 12:48:19.620575 4784 scope.go:117] "RemoveContainer" containerID="b9166665d0db1318f56e34d627f8d703a2eb53a819785168d805aa73d45a1f02" Dec 05 12:48:19 crc kubenswrapper[4784]: E1205 12:48:19.620924 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b9166665d0db1318f56e34d627f8d703a2eb53a819785168d805aa73d45a1f02\": container with ID starting with b9166665d0db1318f56e34d627f8d703a2eb53a819785168d805aa73d45a1f02 not found: ID does not exist" containerID="b9166665d0db1318f56e34d627f8d703a2eb53a819785168d805aa73d45a1f02" Dec 05 12:48:19 crc kubenswrapper[4784]: I1205 12:48:19.620954 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9166665d0db1318f56e34d627f8d703a2eb53a819785168d805aa73d45a1f02"} err="failed to get container status \"b9166665d0db1318f56e34d627f8d703a2eb53a819785168d805aa73d45a1f02\": rpc error: code = NotFound desc = could not find container \"b9166665d0db1318f56e34d627f8d703a2eb53a819785168d805aa73d45a1f02\": container with ID starting with b9166665d0db1318f56e34d627f8d703a2eb53a819785168d805aa73d45a1f02 not found: ID does not exist" Dec 05 12:48:19 crc kubenswrapper[4784]: I1205 12:48:19.620975 4784 scope.go:117] "RemoveContainer" containerID="1daae9961d2636a72eacfc11c4dd802c18e27b1885e3765a50fb0a205c9e1d12" Dec 05 12:48:19 crc kubenswrapper[4784]: E1205 12:48:19.621307 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1daae9961d2636a72eacfc11c4dd802c18e27b1885e3765a50fb0a205c9e1d12\": container with ID starting with 1daae9961d2636a72eacfc11c4dd802c18e27b1885e3765a50fb0a205c9e1d12 not found: ID does not exist" containerID="1daae9961d2636a72eacfc11c4dd802c18e27b1885e3765a50fb0a205c9e1d12" Dec 05 12:48:19 crc kubenswrapper[4784]: I1205 12:48:19.621328 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1daae9961d2636a72eacfc11c4dd802c18e27b1885e3765a50fb0a205c9e1d12"} err="failed to get container status \"1daae9961d2636a72eacfc11c4dd802c18e27b1885e3765a50fb0a205c9e1d12\": rpc error: code = NotFound desc = could not find container \"1daae9961d2636a72eacfc11c4dd802c18e27b1885e3765a50fb0a205c9e1d12\": container with ID starting with 1daae9961d2636a72eacfc11c4dd802c18e27b1885e3765a50fb0a205c9e1d12 not found: ID does not exist" Dec 05 12:48:19 crc kubenswrapper[4784]: I1205 12:48:19.621342 4784 scope.go:117] "RemoveContainer" containerID="b9166665d0db1318f56e34d627f8d703a2eb53a819785168d805aa73d45a1f02" Dec 05 12:48:19 crc kubenswrapper[4784]: I1205 12:48:19.621545 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9166665d0db1318f56e34d627f8d703a2eb53a819785168d805aa73d45a1f02"} err="failed to get container status \"b9166665d0db1318f56e34d627f8d703a2eb53a819785168d805aa73d45a1f02\": rpc error: code = NotFound desc = could not find container \"b9166665d0db1318f56e34d627f8d703a2eb53a819785168d805aa73d45a1f02\": container with ID starting with b9166665d0db1318f56e34d627f8d703a2eb53a819785168d805aa73d45a1f02 not found: ID does not exist" Dec 05 12:48:19 crc kubenswrapper[4784]: I1205 12:48:19.621565 4784 
scope.go:117] "RemoveContainer" containerID="1daae9961d2636a72eacfc11c4dd802c18e27b1885e3765a50fb0a205c9e1d12" Dec 05 12:48:19 crc kubenswrapper[4784]: I1205 12:48:19.621798 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1daae9961d2636a72eacfc11c4dd802c18e27b1885e3765a50fb0a205c9e1d12"} err="failed to get container status \"1daae9961d2636a72eacfc11c4dd802c18e27b1885e3765a50fb0a205c9e1d12\": rpc error: code = NotFound desc = could not find container \"1daae9961d2636a72eacfc11c4dd802c18e27b1885e3765a50fb0a205c9e1d12\": container with ID starting with 1daae9961d2636a72eacfc11c4dd802c18e27b1885e3765a50fb0a205c9e1d12 not found: ID does not exist" Dec 05 12:48:19 crc kubenswrapper[4784]: I1205 12:48:19.637311 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a91d833-04c3-4fec-b2dc-8b8072c154d3-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:48:19 crc kubenswrapper[4784]: I1205 12:48:19.637356 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jgdp8\" (UniqueName: \"kubernetes.io/projected/4a91d833-04c3-4fec-b2dc-8b8072c154d3-kube-api-access-jgdp8\") on node \"crc\" DevicePath \"\"" Dec 05 12:48:19 crc kubenswrapper[4784]: I1205 12:48:19.637366 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a91d833-04c3-4fec-b2dc-8b8072c154d3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:48:19 crc kubenswrapper[4784]: I1205 12:48:19.637374 4784 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4a91d833-04c3-4fec-b2dc-8b8072c154d3-logs\") on node \"crc\" DevicePath \"\"" Dec 05 12:48:19 crc kubenswrapper[4784]: I1205 12:48:19.916555 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 12:48:19 crc kubenswrapper[4784]: I1205 12:48:19.926484 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 12:48:19 crc kubenswrapper[4784]: I1205 12:48:19.937119 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 05 12:48:19 crc kubenswrapper[4784]: E1205 12:48:19.952624 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a91d833-04c3-4fec-b2dc-8b8072c154d3" containerName="nova-metadata-metadata" Dec 05 12:48:19 crc kubenswrapper[4784]: I1205 12:48:19.952661 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a91d833-04c3-4fec-b2dc-8b8072c154d3" containerName="nova-metadata-metadata" Dec 05 12:48:19 crc kubenswrapper[4784]: E1205 12:48:19.952694 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a91d833-04c3-4fec-b2dc-8b8072c154d3" containerName="nova-metadata-log" Dec 05 12:48:19 crc kubenswrapper[4784]: I1205 12:48:19.952701 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a91d833-04c3-4fec-b2dc-8b8072c154d3" containerName="nova-metadata-log" Dec 05 12:48:19 crc kubenswrapper[4784]: I1205 12:48:19.953008 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a91d833-04c3-4fec-b2dc-8b8072c154d3" containerName="nova-metadata-log" Dec 05 12:48:19 crc kubenswrapper[4784]: I1205 12:48:19.953020 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a91d833-04c3-4fec-b2dc-8b8072c154d3" containerName="nova-metadata-metadata" Dec 05 12:48:19 crc kubenswrapper[4784]: I1205 12:48:19.954027 4784 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 12:48:19 crc kubenswrapper[4784]: I1205 12:48:19.956954 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 05 12:48:19 crc kubenswrapper[4784]: I1205 12:48:19.957111 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 12:48:19 crc kubenswrapper[4784]: I1205 12:48:19.968512 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 05 12:48:20 crc kubenswrapper[4784]: I1205 12:48:20.058374 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ldgj\" (UniqueName: \"kubernetes.io/projected/85898168-fe24-44f4-aa83-47064ba85960-kube-api-access-9ldgj\") pod \"nova-metadata-0\" (UID: \"85898168-fe24-44f4-aa83-47064ba85960\") " pod="openstack/nova-metadata-0" Dec 05 12:48:20 crc kubenswrapper[4784]: I1205 12:48:20.058479 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/85898168-fe24-44f4-aa83-47064ba85960-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"85898168-fe24-44f4-aa83-47064ba85960\") " pod="openstack/nova-metadata-0" Dec 05 12:48:20 crc kubenswrapper[4784]: I1205 12:48:20.058503 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85898168-fe24-44f4-aa83-47064ba85960-config-data\") pod \"nova-metadata-0\" (UID: \"85898168-fe24-44f4-aa83-47064ba85960\") " pod="openstack/nova-metadata-0" Dec 05 12:48:20 crc kubenswrapper[4784]: I1205 12:48:20.058570 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85898168-fe24-44f4-aa83-47064ba85960-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"85898168-fe24-44f4-aa83-47064ba85960\") " pod="openstack/nova-metadata-0" Dec 05 12:48:20 crc kubenswrapper[4784]: I1205 12:48:20.058703 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85898168-fe24-44f4-aa83-47064ba85960-logs\") pod \"nova-metadata-0\" (UID: \"85898168-fe24-44f4-aa83-47064ba85960\") " pod="openstack/nova-metadata-0" Dec 05 12:48:20 crc kubenswrapper[4784]: I1205 12:48:20.160417 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9ldgj\" (UniqueName: \"kubernetes.io/projected/85898168-fe24-44f4-aa83-47064ba85960-kube-api-access-9ldgj\") pod \"nova-metadata-0\" (UID: \"85898168-fe24-44f4-aa83-47064ba85960\") " pod="openstack/nova-metadata-0" Dec 05 12:48:20 crc kubenswrapper[4784]: I1205 12:48:20.160976 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/85898168-fe24-44f4-aa83-47064ba85960-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"85898168-fe24-44f4-aa83-47064ba85960\") " pod="openstack/nova-metadata-0" Dec 05 12:48:20 crc kubenswrapper[4784]: I1205 12:48:20.161006 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85898168-fe24-44f4-aa83-47064ba85960-config-data\") pod \"nova-metadata-0\" (UID: \"85898168-fe24-44f4-aa83-47064ba85960\") " 
pod="openstack/nova-metadata-0" Dec 05 12:48:20 crc kubenswrapper[4784]: I1205 12:48:20.161127 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85898168-fe24-44f4-aa83-47064ba85960-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"85898168-fe24-44f4-aa83-47064ba85960\") " pod="openstack/nova-metadata-0" Dec 05 12:48:20 crc kubenswrapper[4784]: I1205 12:48:20.161418 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85898168-fe24-44f4-aa83-47064ba85960-logs\") pod \"nova-metadata-0\" (UID: \"85898168-fe24-44f4-aa83-47064ba85960\") " pod="openstack/nova-metadata-0" Dec 05 12:48:20 crc kubenswrapper[4784]: I1205 12:48:20.165885 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85898168-fe24-44f4-aa83-47064ba85960-logs\") pod \"nova-metadata-0\" (UID: \"85898168-fe24-44f4-aa83-47064ba85960\") " pod="openstack/nova-metadata-0" Dec 05 12:48:20 crc kubenswrapper[4784]: I1205 12:48:20.170352 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/85898168-fe24-44f4-aa83-47064ba85960-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"85898168-fe24-44f4-aa83-47064ba85960\") " pod="openstack/nova-metadata-0" Dec 05 12:48:20 crc kubenswrapper[4784]: I1205 12:48:20.174946 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85898168-fe24-44f4-aa83-47064ba85960-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"85898168-fe24-44f4-aa83-47064ba85960\") " pod="openstack/nova-metadata-0" Dec 05 12:48:20 crc kubenswrapper[4784]: I1205 12:48:20.193953 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9ldgj\" (UniqueName: \"kubernetes.io/projected/85898168-fe24-44f4-aa83-47064ba85960-kube-api-access-9ldgj\") pod \"nova-metadata-0\" (UID: \"85898168-fe24-44f4-aa83-47064ba85960\") " pod="openstack/nova-metadata-0" Dec 05 12:48:20 crc kubenswrapper[4784]: I1205 12:48:20.194098 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85898168-fe24-44f4-aa83-47064ba85960-config-data\") pod \"nova-metadata-0\" (UID: \"85898168-fe24-44f4-aa83-47064ba85960\") " pod="openstack/nova-metadata-0" Dec 05 12:48:20 crc kubenswrapper[4784]: I1205 12:48:20.277953 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 12:48:20 crc kubenswrapper[4784]: I1205 12:48:20.895321 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 12:48:20 crc kubenswrapper[4784]: W1205 12:48:20.898117 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod85898168_fe24_44f4_aa83_47064ba85960.slice/crio-b22ea7fc8e0f4f526151693ade4df30751260c26acd46cc79a71228b02ebc6b6 WatchSource:0}: Error finding container b22ea7fc8e0f4f526151693ade4df30751260c26acd46cc79a71228b02ebc6b6: Status 404 returned error can't find the container with id b22ea7fc8e0f4f526151693ade4df30751260c26acd46cc79a71228b02ebc6b6 Dec 05 12:48:21 crc kubenswrapper[4784]: I1205 12:48:21.013867 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a91d833-04c3-4fec-b2dc-8b8072c154d3" path="/var/lib/kubelet/pods/4a91d833-04c3-4fec-b2dc-8b8072c154d3/volumes" Dec 05 12:48:21 crc kubenswrapper[4784]: I1205 12:48:21.603047 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"85898168-fe24-44f4-aa83-47064ba85960","Type":"ContainerStarted","Data":"8fc1c1bab12e2382df45c1fa2f746524d1b6ea09c7d7ba42405ddcc093f57d70"} Dec 05 12:48:21 crc kubenswrapper[4784]: I1205 12:48:21.603552 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"85898168-fe24-44f4-aa83-47064ba85960","Type":"ContainerStarted","Data":"44ee1c6be3f05f4d6512bb7169d828f894c4ec648197e8830557a9fb0646bd66"} Dec 05 12:48:21 crc kubenswrapper[4784]: I1205 12:48:21.603634 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"85898168-fe24-44f4-aa83-47064ba85960","Type":"ContainerStarted","Data":"b22ea7fc8e0f4f526151693ade4df30751260c26acd46cc79a71228b02ebc6b6"} Dec 05 12:48:21 crc kubenswrapper[4784]: I1205 12:48:21.910225 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 05 12:48:21 crc kubenswrapper[4784]: I1205 12:48:21.911107 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 05 12:48:21 crc kubenswrapper[4784]: I1205 12:48:21.967481 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:48:21 crc kubenswrapper[4784]: I1205 12:48:21.969966 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 05 12:48:22 crc kubenswrapper[4784]: I1205 12:48:22.065459 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 12:48:22 crc kubenswrapper[4784]: I1205 12:48:22.065521 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 12:48:22 crc kubenswrapper[4784]: I1205 12:48:22.376375 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7998f99df9-vq7lx" Dec 05 12:48:22 crc kubenswrapper[4784]: I1205 12:48:22.495601 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-c688f947-l56ns"] Dec 05 12:48:22 crc kubenswrapper[4784]: I1205 12:48:22.496204 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-c688f947-l56ns" podUID="d4679e9e-a588-435e-96b5-7c2d31a6cc03" containerName="dnsmasq-dns" 
containerID="cri-o://ae0b55c74257741bcb6f1e7986dff15a2d8bb36545c003befffbafa5cc29ebb6" gracePeriod=10 Dec 05 12:48:22 crc kubenswrapper[4784]: I1205 12:48:22.616834 4784 generic.go:334] "Generic (PLEG): container finished" podID="31e25db3-7d9e-43b2-8e5a-b6956be5114e" containerID="36e33246f62869ea234612af8ba86e05fad0a2809ea7858bd8d9d4f14dc6facf" exitCode=0 Dec 05 12:48:22 crc kubenswrapper[4784]: I1205 12:48:22.618765 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-9qvf2" event={"ID":"31e25db3-7d9e-43b2-8e5a-b6956be5114e","Type":"ContainerDied","Data":"36e33246f62869ea234612af8ba86e05fad0a2809ea7858bd8d9d4f14dc6facf"} Dec 05 12:48:22 crc kubenswrapper[4784]: I1205 12:48:22.673448 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 05 12:48:22 crc kubenswrapper[4784]: I1205 12:48:22.689976 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.68995591 podStartE2EDuration="3.68995591s" podCreationTimestamp="2025-12-05 12:48:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:48:22.67362805 +0000 UTC m=+1382.093694865" watchObservedRunningTime="2025-12-05 12:48:22.68995591 +0000 UTC m=+1382.110022725" Dec 05 12:48:23 crc kubenswrapper[4784]: I1205 12:48:23.115015 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-c688f947-l56ns" Dec 05 12:48:23 crc kubenswrapper[4784]: I1205 12:48:23.153602 4784 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="ec225a4a-0751-4625-9540-086443e2a9a7" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.212:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 12:48:23 crc kubenswrapper[4784]: I1205 12:48:23.153845 4784 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="ec225a4a-0751-4625-9540-086443e2a9a7" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.212:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 12:48:23 crc kubenswrapper[4784]: I1205 12:48:23.235619 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d4679e9e-a588-435e-96b5-7c2d31a6cc03-dns-svc\") pod \"d4679e9e-a588-435e-96b5-7c2d31a6cc03\" (UID: \"d4679e9e-a588-435e-96b5-7c2d31a6cc03\") " Dec 05 12:48:23 crc kubenswrapper[4784]: I1205 12:48:23.235678 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d4679e9e-a588-435e-96b5-7c2d31a6cc03-ovsdbserver-sb\") pod \"d4679e9e-a588-435e-96b5-7c2d31a6cc03\" (UID: \"d4679e9e-a588-435e-96b5-7c2d31a6cc03\") " Dec 05 12:48:23 crc kubenswrapper[4784]: I1205 12:48:23.235777 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d4679e9e-a588-435e-96b5-7c2d31a6cc03-ovsdbserver-nb\") pod \"d4679e9e-a588-435e-96b5-7c2d31a6cc03\" (UID: \"d4679e9e-a588-435e-96b5-7c2d31a6cc03\") " Dec 05 12:48:23 crc kubenswrapper[4784]: I1205 12:48:23.235839 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: 
\"kubernetes.io/configmap/d4679e9e-a588-435e-96b5-7c2d31a6cc03-dns-swift-storage-0\") pod \"d4679e9e-a588-435e-96b5-7c2d31a6cc03\" (UID: \"d4679e9e-a588-435e-96b5-7c2d31a6cc03\") " Dec 05 12:48:23 crc kubenswrapper[4784]: I1205 12:48:23.235864 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nbmpq\" (UniqueName: \"kubernetes.io/projected/d4679e9e-a588-435e-96b5-7c2d31a6cc03-kube-api-access-nbmpq\") pod \"d4679e9e-a588-435e-96b5-7c2d31a6cc03\" (UID: \"d4679e9e-a588-435e-96b5-7c2d31a6cc03\") " Dec 05 12:48:23 crc kubenswrapper[4784]: I1205 12:48:23.235887 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4679e9e-a588-435e-96b5-7c2d31a6cc03-config\") pod \"d4679e9e-a588-435e-96b5-7c2d31a6cc03\" (UID: \"d4679e9e-a588-435e-96b5-7c2d31a6cc03\") " Dec 05 12:48:23 crc kubenswrapper[4784]: I1205 12:48:23.248966 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4679e9e-a588-435e-96b5-7c2d31a6cc03-kube-api-access-nbmpq" (OuterVolumeSpecName: "kube-api-access-nbmpq") pod "d4679e9e-a588-435e-96b5-7c2d31a6cc03" (UID: "d4679e9e-a588-435e-96b5-7c2d31a6cc03"). InnerVolumeSpecName "kube-api-access-nbmpq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:48:23 crc kubenswrapper[4784]: I1205 12:48:23.306826 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4679e9e-a588-435e-96b5-7c2d31a6cc03-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d4679e9e-a588-435e-96b5-7c2d31a6cc03" (UID: "d4679e9e-a588-435e-96b5-7c2d31a6cc03"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:48:23 crc kubenswrapper[4784]: I1205 12:48:23.307963 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4679e9e-a588-435e-96b5-7c2d31a6cc03-config" (OuterVolumeSpecName: "config") pod "d4679e9e-a588-435e-96b5-7c2d31a6cc03" (UID: "d4679e9e-a588-435e-96b5-7c2d31a6cc03"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:48:23 crc kubenswrapper[4784]: I1205 12:48:23.315420 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4679e9e-a588-435e-96b5-7c2d31a6cc03-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d4679e9e-a588-435e-96b5-7c2d31a6cc03" (UID: "d4679e9e-a588-435e-96b5-7c2d31a6cc03"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:48:23 crc kubenswrapper[4784]: I1205 12:48:23.323005 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4679e9e-a588-435e-96b5-7c2d31a6cc03-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "d4679e9e-a588-435e-96b5-7c2d31a6cc03" (UID: "d4679e9e-a588-435e-96b5-7c2d31a6cc03"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:48:23 crc kubenswrapper[4784]: I1205 12:48:23.338611 4784 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d4679e9e-a588-435e-96b5-7c2d31a6cc03-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 12:48:23 crc kubenswrapper[4784]: I1205 12:48:23.338649 4784 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d4679e9e-a588-435e-96b5-7c2d31a6cc03-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 12:48:23 crc kubenswrapper[4784]: I1205 12:48:23.338663 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nbmpq\" (UniqueName: \"kubernetes.io/projected/d4679e9e-a588-435e-96b5-7c2d31a6cc03-kube-api-access-nbmpq\") on node \"crc\" DevicePath \"\"" Dec 05 12:48:23 crc kubenswrapper[4784]: I1205 12:48:23.338676 4784 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4679e9e-a588-435e-96b5-7c2d31a6cc03-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:48:23 crc kubenswrapper[4784]: I1205 12:48:23.338687 4784 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d4679e9e-a588-435e-96b5-7c2d31a6cc03-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 12:48:23 crc kubenswrapper[4784]: I1205 12:48:23.358634 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4679e9e-a588-435e-96b5-7c2d31a6cc03-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d4679e9e-a588-435e-96b5-7c2d31a6cc03" (UID: "d4679e9e-a588-435e-96b5-7c2d31a6cc03"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:48:23 crc kubenswrapper[4784]: I1205 12:48:23.440950 4784 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d4679e9e-a588-435e-96b5-7c2d31a6cc03-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 12:48:23 crc kubenswrapper[4784]: I1205 12:48:23.628958 4784 generic.go:334] "Generic (PLEG): container finished" podID="d4679e9e-a588-435e-96b5-7c2d31a6cc03" containerID="ae0b55c74257741bcb6f1e7986dff15a2d8bb36545c003befffbafa5cc29ebb6" exitCode=0 Dec 05 12:48:23 crc kubenswrapper[4784]: I1205 12:48:23.629454 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-c688f947-l56ns" Dec 05 12:48:23 crc kubenswrapper[4784]: I1205 12:48:23.629473 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c688f947-l56ns" event={"ID":"d4679e9e-a588-435e-96b5-7c2d31a6cc03","Type":"ContainerDied","Data":"ae0b55c74257741bcb6f1e7986dff15a2d8bb36545c003befffbafa5cc29ebb6"} Dec 05 12:48:23 crc kubenswrapper[4784]: I1205 12:48:23.629537 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c688f947-l56ns" event={"ID":"d4679e9e-a588-435e-96b5-7c2d31a6cc03","Type":"ContainerDied","Data":"d1404c95c6677c5892a2801aa6b67f0d1f17fed41fc5e9c9cfaab6535ccac437"} Dec 05 12:48:23 crc kubenswrapper[4784]: I1205 12:48:23.629565 4784 scope.go:117] "RemoveContainer" containerID="ae0b55c74257741bcb6f1e7986dff15a2d8bb36545c003befffbafa5cc29ebb6" Dec 05 12:48:23 crc kubenswrapper[4784]: I1205 12:48:23.680611 4784 scope.go:117] "RemoveContainer" containerID="b11a4f358894c393793c5f2d413a015d3c656b65e8fdc92df4d370dc816a8f00" Dec 05 12:48:23 crc kubenswrapper[4784]: I1205 12:48:23.697595 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-c688f947-l56ns"] Dec 05 12:48:23 crc kubenswrapper[4784]: I1205 12:48:23.723151 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-c688f947-l56ns"] Dec 05 12:48:23 crc kubenswrapper[4784]: I1205 12:48:23.727841 4784 scope.go:117] "RemoveContainer" containerID="ae0b55c74257741bcb6f1e7986dff15a2d8bb36545c003befffbafa5cc29ebb6" Dec 05 12:48:23 crc kubenswrapper[4784]: E1205 12:48:23.728280 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae0b55c74257741bcb6f1e7986dff15a2d8bb36545c003befffbafa5cc29ebb6\": container with ID starting with ae0b55c74257741bcb6f1e7986dff15a2d8bb36545c003befffbafa5cc29ebb6 not found: ID does not exist" containerID="ae0b55c74257741bcb6f1e7986dff15a2d8bb36545c003befffbafa5cc29ebb6" Dec 05 12:48:23 crc kubenswrapper[4784]: I1205 12:48:23.728306 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae0b55c74257741bcb6f1e7986dff15a2d8bb36545c003befffbafa5cc29ebb6"} err="failed to get container status \"ae0b55c74257741bcb6f1e7986dff15a2d8bb36545c003befffbafa5cc29ebb6\": rpc error: code = NotFound desc = could not find container \"ae0b55c74257741bcb6f1e7986dff15a2d8bb36545c003befffbafa5cc29ebb6\": container with ID starting with ae0b55c74257741bcb6f1e7986dff15a2d8bb36545c003befffbafa5cc29ebb6 not found: ID does not exist" Dec 05 12:48:23 crc kubenswrapper[4784]: I1205 12:48:23.728329 4784 scope.go:117] "RemoveContainer" containerID="b11a4f358894c393793c5f2d413a015d3c656b65e8fdc92df4d370dc816a8f00" Dec 05 12:48:23 crc kubenswrapper[4784]: E1205 12:48:23.728523 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b11a4f358894c393793c5f2d413a015d3c656b65e8fdc92df4d370dc816a8f00\": container with ID starting with b11a4f358894c393793c5f2d413a015d3c656b65e8fdc92df4d370dc816a8f00 not found: ID does not exist" containerID="b11a4f358894c393793c5f2d413a015d3c656b65e8fdc92df4d370dc816a8f00" Dec 05 12:48:23 crc kubenswrapper[4784]: I1205 12:48:23.728543 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b11a4f358894c393793c5f2d413a015d3c656b65e8fdc92df4d370dc816a8f00"} err="failed to get container status 
\"b11a4f358894c393793c5f2d413a015d3c656b65e8fdc92df4d370dc816a8f00\": rpc error: code = NotFound desc = could not find container \"b11a4f358894c393793c5f2d413a015d3c656b65e8fdc92df4d370dc816a8f00\": container with ID starting with b11a4f358894c393793c5f2d413a015d3c656b65e8fdc92df4d370dc816a8f00 not found: ID does not exist" Dec 05 12:48:24 crc kubenswrapper[4784]: I1205 12:48:24.049432 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-9qvf2" Dec 05 12:48:24 crc kubenswrapper[4784]: I1205 12:48:24.159385 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31e25db3-7d9e-43b2-8e5a-b6956be5114e-config-data\") pod \"31e25db3-7d9e-43b2-8e5a-b6956be5114e\" (UID: \"31e25db3-7d9e-43b2-8e5a-b6956be5114e\") " Dec 05 12:48:24 crc kubenswrapper[4784]: I1205 12:48:24.159440 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6vrdb\" (UniqueName: \"kubernetes.io/projected/31e25db3-7d9e-43b2-8e5a-b6956be5114e-kube-api-access-6vrdb\") pod \"31e25db3-7d9e-43b2-8e5a-b6956be5114e\" (UID: \"31e25db3-7d9e-43b2-8e5a-b6956be5114e\") " Dec 05 12:48:24 crc kubenswrapper[4784]: I1205 12:48:24.159470 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/31e25db3-7d9e-43b2-8e5a-b6956be5114e-scripts\") pod \"31e25db3-7d9e-43b2-8e5a-b6956be5114e\" (UID: \"31e25db3-7d9e-43b2-8e5a-b6956be5114e\") " Dec 05 12:48:24 crc kubenswrapper[4784]: I1205 12:48:24.159547 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31e25db3-7d9e-43b2-8e5a-b6956be5114e-combined-ca-bundle\") pod \"31e25db3-7d9e-43b2-8e5a-b6956be5114e\" (UID: \"31e25db3-7d9e-43b2-8e5a-b6956be5114e\") " Dec 05 12:48:24 crc kubenswrapper[4784]: I1205 12:48:24.164579 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31e25db3-7d9e-43b2-8e5a-b6956be5114e-kube-api-access-6vrdb" (OuterVolumeSpecName: "kube-api-access-6vrdb") pod "31e25db3-7d9e-43b2-8e5a-b6956be5114e" (UID: "31e25db3-7d9e-43b2-8e5a-b6956be5114e"). InnerVolumeSpecName "kube-api-access-6vrdb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:48:24 crc kubenswrapper[4784]: I1205 12:48:24.178428 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31e25db3-7d9e-43b2-8e5a-b6956be5114e-scripts" (OuterVolumeSpecName: "scripts") pod "31e25db3-7d9e-43b2-8e5a-b6956be5114e" (UID: "31e25db3-7d9e-43b2-8e5a-b6956be5114e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:48:24 crc kubenswrapper[4784]: I1205 12:48:24.190728 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31e25db3-7d9e-43b2-8e5a-b6956be5114e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "31e25db3-7d9e-43b2-8e5a-b6956be5114e" (UID: "31e25db3-7d9e-43b2-8e5a-b6956be5114e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:48:24 crc kubenswrapper[4784]: I1205 12:48:24.194910 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31e25db3-7d9e-43b2-8e5a-b6956be5114e-config-data" (OuterVolumeSpecName: "config-data") pod "31e25db3-7d9e-43b2-8e5a-b6956be5114e" (UID: "31e25db3-7d9e-43b2-8e5a-b6956be5114e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:48:24 crc kubenswrapper[4784]: I1205 12:48:24.261681 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31e25db3-7d9e-43b2-8e5a-b6956be5114e-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:48:24 crc kubenswrapper[4784]: I1205 12:48:24.261712 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6vrdb\" (UniqueName: \"kubernetes.io/projected/31e25db3-7d9e-43b2-8e5a-b6956be5114e-kube-api-access-6vrdb\") on node \"crc\" DevicePath \"\"" Dec 05 12:48:24 crc kubenswrapper[4784]: I1205 12:48:24.261721 4784 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/31e25db3-7d9e-43b2-8e5a-b6956be5114e-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:48:24 crc kubenswrapper[4784]: I1205 12:48:24.261731 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31e25db3-7d9e-43b2-8e5a-b6956be5114e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:48:24 crc kubenswrapper[4784]: I1205 12:48:24.640212 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-9qvf2" event={"ID":"31e25db3-7d9e-43b2-8e5a-b6956be5114e","Type":"ContainerDied","Data":"27bf1b2fffec005d7b357054b75c46d23d81b8f80ca8d8aa43aebba3e70839b6"} Dec 05 12:48:24 crc kubenswrapper[4784]: I1205 12:48:24.640259 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="27bf1b2fffec005d7b357054b75c46d23d81b8f80ca8d8aa43aebba3e70839b6" Dec 05 12:48:24 crc kubenswrapper[4784]: I1205 12:48:24.640297 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-9qvf2" Dec 05 12:48:24 crc kubenswrapper[4784]: I1205 12:48:24.821415 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 12:48:24 crc kubenswrapper[4784]: I1205 12:48:24.821676 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="ec225a4a-0751-4625-9540-086443e2a9a7" containerName="nova-api-log" containerID="cri-o://38bd8b01db6a75d26a460ccbee052d43609508f7fb42398138b511acb7eac580" gracePeriod=30 Dec 05 12:48:24 crc kubenswrapper[4784]: I1205 12:48:24.821780 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="ec225a4a-0751-4625-9540-086443e2a9a7" containerName="nova-api-api" containerID="cri-o://f16bd9d5b32486cc8b7b81ed2382ec407b19bb253b6ab9928f5da9404ca63793" gracePeriod=30 Dec 05 12:48:24 crc kubenswrapper[4784]: I1205 12:48:24.833874 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 12:48:24 crc kubenswrapper[4784]: I1205 12:48:24.834073 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="1ab7198f-769d-49fc-9b8d-cf5825a15018" containerName="nova-scheduler-scheduler" containerID="cri-o://0a80545960e1f69c8912f9c3b29a3e0f1d8fe17acd09ee2977e4bc984c4fa857" gracePeriod=30 Dec 05 12:48:24 crc kubenswrapper[4784]: I1205 12:48:24.931949 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 12:48:24 crc kubenswrapper[4784]: I1205 12:48:24.932521 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="85898168-fe24-44f4-aa83-47064ba85960" containerName="nova-metadata-log" containerID="cri-o://44ee1c6be3f05f4d6512bb7169d828f894c4ec648197e8830557a9fb0646bd66" gracePeriod=30 Dec 05 12:48:24 crc kubenswrapper[4784]: I1205 12:48:24.932906 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="85898168-fe24-44f4-aa83-47064ba85960" containerName="nova-metadata-metadata" containerID="cri-o://8fc1c1bab12e2382df45c1fa2f746524d1b6ea09c7d7ba42405ddcc093f57d70" gracePeriod=30 Dec 05 12:48:25 crc kubenswrapper[4784]: I1205 12:48:25.018150 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4679e9e-a588-435e-96b5-7c2d31a6cc03" path="/var/lib/kubelet/pods/d4679e9e-a588-435e-96b5-7c2d31a6cc03/volumes" Dec 05 12:48:25 crc kubenswrapper[4784]: I1205 12:48:25.279417 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 12:48:25 crc kubenswrapper[4784]: I1205 12:48:25.279455 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 12:48:25 crc kubenswrapper[4784]: I1205 12:48:25.533506 4784 util.go:48] "No ready sandbox for pod can be found. 
Dec 05 12:48:25 crc kubenswrapper[4784]: I1205 12:48:25.651936 4784 generic.go:334] "Generic (PLEG): container finished" podID="85898168-fe24-44f4-aa83-47064ba85960" containerID="8fc1c1bab12e2382df45c1fa2f746524d1b6ea09c7d7ba42405ddcc093f57d70" exitCode=0
Dec 05 12:48:25 crc kubenswrapper[4784]: I1205 12:48:25.651970 4784 generic.go:334] "Generic (PLEG): container finished" podID="85898168-fe24-44f4-aa83-47064ba85960" containerID="44ee1c6be3f05f4d6512bb7169d828f894c4ec648197e8830557a9fb0646bd66" exitCode=143
Dec 05 12:48:25 crc kubenswrapper[4784]: I1205 12:48:25.651983 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 05 12:48:25 crc kubenswrapper[4784]: I1205 12:48:25.652032 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"85898168-fe24-44f4-aa83-47064ba85960","Type":"ContainerDied","Data":"8fc1c1bab12e2382df45c1fa2f746524d1b6ea09c7d7ba42405ddcc093f57d70"}
Dec 05 12:48:25 crc kubenswrapper[4784]: I1205 12:48:25.652095 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"85898168-fe24-44f4-aa83-47064ba85960","Type":"ContainerDied","Data":"44ee1c6be3f05f4d6512bb7169d828f894c4ec648197e8830557a9fb0646bd66"}
Dec 05 12:48:25 crc kubenswrapper[4784]: I1205 12:48:25.652123 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"85898168-fe24-44f4-aa83-47064ba85960","Type":"ContainerDied","Data":"b22ea7fc8e0f4f526151693ade4df30751260c26acd46cc79a71228b02ebc6b6"}
Dec 05 12:48:25 crc kubenswrapper[4784]: I1205 12:48:25.652139 4784 scope.go:117] "RemoveContainer" containerID="8fc1c1bab12e2382df45c1fa2f746524d1b6ea09c7d7ba42405ddcc093f57d70"
Dec 05 12:48:25 crc kubenswrapper[4784]: I1205 12:48:25.654039 4784 generic.go:334] "Generic (PLEG): container finished" podID="e7b90226-6cca-424b-9a49-d2f1bf8c289f" containerID="0f35c6424f83e93161e92f8c310874f70e2e61e59304c6f30ed83bb875f0de4b" exitCode=0
Dec 05 12:48:25 crc kubenswrapper[4784]: I1205 12:48:25.654061 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-69577" event={"ID":"e7b90226-6cca-424b-9a49-d2f1bf8c289f","Type":"ContainerDied","Data":"0f35c6424f83e93161e92f8c310874f70e2e61e59304c6f30ed83bb875f0de4b"}
Dec 05 12:48:25 crc kubenswrapper[4784]: I1205 12:48:25.655818 4784 generic.go:334] "Generic (PLEG): container finished" podID="ec225a4a-0751-4625-9540-086443e2a9a7" containerID="38bd8b01db6a75d26a460ccbee052d43609508f7fb42398138b511acb7eac580" exitCode=143
Dec 05 12:48:25 crc kubenswrapper[4784]: I1205 12:48:25.655842 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ec225a4a-0751-4625-9540-086443e2a9a7","Type":"ContainerDied","Data":"38bd8b01db6a75d26a460ccbee052d43609508f7fb42398138b511acb7eac580"}
Dec 05 12:48:25 crc kubenswrapper[4784]: I1205 12:48:25.679277 4784 scope.go:117] "RemoveContainer" containerID="44ee1c6be3f05f4d6512bb7169d828f894c4ec648197e8830557a9fb0646bd66"
Dec 05 12:48:25 crc kubenswrapper[4784]: I1205 12:48:25.684958 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85898168-fe24-44f4-aa83-47064ba85960-combined-ca-bundle\") pod \"85898168-fe24-44f4-aa83-47064ba85960\" (UID: \"85898168-fe24-44f4-aa83-47064ba85960\") "
Dec 05 12:48:25 crc kubenswrapper[4784]: I1205 12:48:25.685107 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/85898168-fe24-44f4-aa83-47064ba85960-nova-metadata-tls-certs\") pod \"85898168-fe24-44f4-aa83-47064ba85960\" (UID: \"85898168-fe24-44f4-aa83-47064ba85960\") "
Dec 05 12:48:25 crc kubenswrapper[4784]: I1205 12:48:25.685319 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85898168-fe24-44f4-aa83-47064ba85960-logs\") pod \"85898168-fe24-44f4-aa83-47064ba85960\" (UID: \"85898168-fe24-44f4-aa83-47064ba85960\") "
Dec 05 12:48:25 crc kubenswrapper[4784]: I1205 12:48:25.685761 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/85898168-fe24-44f4-aa83-47064ba85960-logs" (OuterVolumeSpecName: "logs") pod "85898168-fe24-44f4-aa83-47064ba85960" (UID: "85898168-fe24-44f4-aa83-47064ba85960"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 12:48:25 crc kubenswrapper[4784]: I1205 12:48:25.685848 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85898168-fe24-44f4-aa83-47064ba85960-config-data\") pod \"85898168-fe24-44f4-aa83-47064ba85960\" (UID: \"85898168-fe24-44f4-aa83-47064ba85960\") "
Dec 05 12:48:25 crc kubenswrapper[4784]: I1205 12:48:25.685908 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9ldgj\" (UniqueName: \"kubernetes.io/projected/85898168-fe24-44f4-aa83-47064ba85960-kube-api-access-9ldgj\") pod \"85898168-fe24-44f4-aa83-47064ba85960\" (UID: \"85898168-fe24-44f4-aa83-47064ba85960\") "
Dec 05 12:48:25 crc kubenswrapper[4784]: I1205 12:48:25.686398 4784 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85898168-fe24-44f4-aa83-47064ba85960-logs\") on node \"crc\" DevicePath \"\""
Dec 05 12:48:25 crc kubenswrapper[4784]: I1205 12:48:25.701386 4784 scope.go:117] "RemoveContainer" containerID="8fc1c1bab12e2382df45c1fa2f746524d1b6ea09c7d7ba42405ddcc093f57d70"
Dec 05 12:48:25 crc kubenswrapper[4784]: I1205 12:48:25.701555 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85898168-fe24-44f4-aa83-47064ba85960-kube-api-access-9ldgj" (OuterVolumeSpecName: "kube-api-access-9ldgj") pod "85898168-fe24-44f4-aa83-47064ba85960" (UID: "85898168-fe24-44f4-aa83-47064ba85960"). InnerVolumeSpecName "kube-api-access-9ldgj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:48:25 crc kubenswrapper[4784]: E1205 12:48:25.702660 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8fc1c1bab12e2382df45c1fa2f746524d1b6ea09c7d7ba42405ddcc093f57d70\": container with ID starting with 8fc1c1bab12e2382df45c1fa2f746524d1b6ea09c7d7ba42405ddcc093f57d70 not found: ID does not exist" containerID="8fc1c1bab12e2382df45c1fa2f746524d1b6ea09c7d7ba42405ddcc093f57d70"
Dec 05 12:48:25 crc kubenswrapper[4784]: I1205 12:48:25.702707 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8fc1c1bab12e2382df45c1fa2f746524d1b6ea09c7d7ba42405ddcc093f57d70"} err="failed to get container status \"8fc1c1bab12e2382df45c1fa2f746524d1b6ea09c7d7ba42405ddcc093f57d70\": rpc error: code = NotFound desc = could not find container \"8fc1c1bab12e2382df45c1fa2f746524d1b6ea09c7d7ba42405ddcc093f57d70\": container with ID starting with 8fc1c1bab12e2382df45c1fa2f746524d1b6ea09c7d7ba42405ddcc093f57d70 not found: ID does not exist"
Dec 05 12:48:25 crc kubenswrapper[4784]: I1205 12:48:25.702734 4784 scope.go:117] "RemoveContainer" containerID="44ee1c6be3f05f4d6512bb7169d828f894c4ec648197e8830557a9fb0646bd66"
Dec 05 12:48:25 crc kubenswrapper[4784]: E1205 12:48:25.705497 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"44ee1c6be3f05f4d6512bb7169d828f894c4ec648197e8830557a9fb0646bd66\": container with ID starting with 44ee1c6be3f05f4d6512bb7169d828f894c4ec648197e8830557a9fb0646bd66 not found: ID does not exist" containerID="44ee1c6be3f05f4d6512bb7169d828f894c4ec648197e8830557a9fb0646bd66"
Dec 05 12:48:25 crc kubenswrapper[4784]: I1205 12:48:25.705535 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"44ee1c6be3f05f4d6512bb7169d828f894c4ec648197e8830557a9fb0646bd66"} err="failed to get container status \"44ee1c6be3f05f4d6512bb7169d828f894c4ec648197e8830557a9fb0646bd66\": rpc error: code = NotFound desc = could not find container \"44ee1c6be3f05f4d6512bb7169d828f894c4ec648197e8830557a9fb0646bd66\": container with ID starting with 44ee1c6be3f05f4d6512bb7169d828f894c4ec648197e8830557a9fb0646bd66 not found: ID does not exist"
Dec 05 12:48:25 crc kubenswrapper[4784]: I1205 12:48:25.705557 4784 scope.go:117] "RemoveContainer" containerID="8fc1c1bab12e2382df45c1fa2f746524d1b6ea09c7d7ba42405ddcc093f57d70"
Dec 05 12:48:25 crc kubenswrapper[4784]: I1205 12:48:25.709671 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8fc1c1bab12e2382df45c1fa2f746524d1b6ea09c7d7ba42405ddcc093f57d70"} err="failed to get container status \"8fc1c1bab12e2382df45c1fa2f746524d1b6ea09c7d7ba42405ddcc093f57d70\": rpc error: code = NotFound desc = could not find container \"8fc1c1bab12e2382df45c1fa2f746524d1b6ea09c7d7ba42405ddcc093f57d70\": container with ID starting with 8fc1c1bab12e2382df45c1fa2f746524d1b6ea09c7d7ba42405ddcc093f57d70 not found: ID does not exist"
Dec 05 12:48:25 crc kubenswrapper[4784]: I1205 12:48:25.709701 4784 scope.go:117] "RemoveContainer" containerID="44ee1c6be3f05f4d6512bb7169d828f894c4ec648197e8830557a9fb0646bd66"
Dec 05 12:48:25 crc kubenswrapper[4784]: I1205 12:48:25.710622 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"44ee1c6be3f05f4d6512bb7169d828f894c4ec648197e8830557a9fb0646bd66"} err="failed to get container status \"44ee1c6be3f05f4d6512bb7169d828f894c4ec648197e8830557a9fb0646bd66\": rpc error: code = NotFound desc = could not find container \"44ee1c6be3f05f4d6512bb7169d828f894c4ec648197e8830557a9fb0646bd66\": container with ID starting with 44ee1c6be3f05f4d6512bb7169d828f894c4ec648197e8830557a9fb0646bd66 not found: ID does not exist"
Dec 05 12:48:25 crc kubenswrapper[4784]: I1205 12:48:25.717142 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85898168-fe24-44f4-aa83-47064ba85960-config-data" (OuterVolumeSpecName: "config-data") pod "85898168-fe24-44f4-aa83-47064ba85960" (UID: "85898168-fe24-44f4-aa83-47064ba85960"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:48:25 crc kubenswrapper[4784]: I1205 12:48:25.720362 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85898168-fe24-44f4-aa83-47064ba85960-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "85898168-fe24-44f4-aa83-47064ba85960" (UID: "85898168-fe24-44f4-aa83-47064ba85960"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:48:25 crc kubenswrapper[4784]: I1205 12:48:25.752374 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85898168-fe24-44f4-aa83-47064ba85960-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "85898168-fe24-44f4-aa83-47064ba85960" (UID: "85898168-fe24-44f4-aa83-47064ba85960"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:48:25 crc kubenswrapper[4784]: I1205 12:48:25.788155 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85898168-fe24-44f4-aa83-47064ba85960-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 12:48:25 crc kubenswrapper[4784]: I1205 12:48:25.788199 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9ldgj\" (UniqueName: \"kubernetes.io/projected/85898168-fe24-44f4-aa83-47064ba85960-kube-api-access-9ldgj\") on node \"crc\" DevicePath \"\""
Dec 05 12:48:25 crc kubenswrapper[4784]: I1205 12:48:25.788210 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85898168-fe24-44f4-aa83-47064ba85960-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 12:48:25 crc kubenswrapper[4784]: I1205 12:48:25.788219 4784 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/85898168-fe24-44f4-aa83-47064ba85960-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 05 12:48:25 crc kubenswrapper[4784]: I1205 12:48:25.989281 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 12:48:26 crc kubenswrapper[4784]: I1205 12:48:26.005082 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 12:48:26 crc kubenswrapper[4784]: I1205 12:48:26.018496 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 12:48:26 crc kubenswrapper[4784]: E1205 12:48:26.019005 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85898168-fe24-44f4-aa83-47064ba85960" containerName="nova-metadata-metadata"
Dec 05 12:48:26 crc kubenswrapper[4784]: I1205 12:48:26.019030 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="85898168-fe24-44f4-aa83-47064ba85960" containerName="nova-metadata-metadata"
podUID="85898168-fe24-44f4-aa83-47064ba85960" containerName="nova-metadata-metadata" Dec 05 12:48:26 crc kubenswrapper[4784]: E1205 12:48:26.019054 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4679e9e-a588-435e-96b5-7c2d31a6cc03" containerName="init" Dec 05 12:48:26 crc kubenswrapper[4784]: I1205 12:48:26.019063 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4679e9e-a588-435e-96b5-7c2d31a6cc03" containerName="init" Dec 05 12:48:26 crc kubenswrapper[4784]: E1205 12:48:26.019081 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85898168-fe24-44f4-aa83-47064ba85960" containerName="nova-metadata-log" Dec 05 12:48:26 crc kubenswrapper[4784]: I1205 12:48:26.019090 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="85898168-fe24-44f4-aa83-47064ba85960" containerName="nova-metadata-log" Dec 05 12:48:26 crc kubenswrapper[4784]: E1205 12:48:26.019110 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4679e9e-a588-435e-96b5-7c2d31a6cc03" containerName="dnsmasq-dns" Dec 05 12:48:26 crc kubenswrapper[4784]: I1205 12:48:26.019118 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4679e9e-a588-435e-96b5-7c2d31a6cc03" containerName="dnsmasq-dns" Dec 05 12:48:26 crc kubenswrapper[4784]: E1205 12:48:26.019146 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31e25db3-7d9e-43b2-8e5a-b6956be5114e" containerName="nova-manage" Dec 05 12:48:26 crc kubenswrapper[4784]: I1205 12:48:26.019154 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="31e25db3-7d9e-43b2-8e5a-b6956be5114e" containerName="nova-manage" Dec 05 12:48:26 crc kubenswrapper[4784]: I1205 12:48:26.019492 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="85898168-fe24-44f4-aa83-47064ba85960" containerName="nova-metadata-log" Dec 05 12:48:26 crc kubenswrapper[4784]: I1205 12:48:26.019510 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4679e9e-a588-435e-96b5-7c2d31a6cc03" containerName="dnsmasq-dns" Dec 05 12:48:26 crc kubenswrapper[4784]: I1205 12:48:26.019531 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="31e25db3-7d9e-43b2-8e5a-b6956be5114e" containerName="nova-manage" Dec 05 12:48:26 crc kubenswrapper[4784]: I1205 12:48:26.019546 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="85898168-fe24-44f4-aa83-47064ba85960" containerName="nova-metadata-metadata" Dec 05 12:48:26 crc kubenswrapper[4784]: I1205 12:48:26.020860 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 12:48:26 crc kubenswrapper[4784]: I1205 12:48:26.023291 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 05 12:48:26 crc kubenswrapper[4784]: I1205 12:48:26.024266 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 05 12:48:26 crc kubenswrapper[4784]: I1205 12:48:26.029922 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 12:48:26 crc kubenswrapper[4784]: I1205 12:48:26.196828 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-84nmt\" (UniqueName: \"kubernetes.io/projected/97a264d6-629e-457a-8e0b-7d0ce2255a93-kube-api-access-84nmt\") pod \"nova-metadata-0\" (UID: \"97a264d6-629e-457a-8e0b-7d0ce2255a93\") " pod="openstack/nova-metadata-0" Dec 05 12:48:26 crc kubenswrapper[4784]: I1205 12:48:26.196914 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97a264d6-629e-457a-8e0b-7d0ce2255a93-config-data\") pod \"nova-metadata-0\" (UID: \"97a264d6-629e-457a-8e0b-7d0ce2255a93\") " pod="openstack/nova-metadata-0" Dec 05 12:48:26 crc kubenswrapper[4784]: I1205 12:48:26.196979 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/97a264d6-629e-457a-8e0b-7d0ce2255a93-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"97a264d6-629e-457a-8e0b-7d0ce2255a93\") " pod="openstack/nova-metadata-0" Dec 05 12:48:26 crc kubenswrapper[4784]: I1205 12:48:26.197011 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97a264d6-629e-457a-8e0b-7d0ce2255a93-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"97a264d6-629e-457a-8e0b-7d0ce2255a93\") " pod="openstack/nova-metadata-0" Dec 05 12:48:26 crc kubenswrapper[4784]: I1205 12:48:26.197037 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/97a264d6-629e-457a-8e0b-7d0ce2255a93-logs\") pod \"nova-metadata-0\" (UID: \"97a264d6-629e-457a-8e0b-7d0ce2255a93\") " pod="openstack/nova-metadata-0" Dec 05 12:48:26 crc kubenswrapper[4784]: I1205 12:48:26.298987 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/97a264d6-629e-457a-8e0b-7d0ce2255a93-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"97a264d6-629e-457a-8e0b-7d0ce2255a93\") " pod="openstack/nova-metadata-0" Dec 05 12:48:26 crc kubenswrapper[4784]: I1205 12:48:26.299072 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97a264d6-629e-457a-8e0b-7d0ce2255a93-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"97a264d6-629e-457a-8e0b-7d0ce2255a93\") " pod="openstack/nova-metadata-0" Dec 05 12:48:26 crc kubenswrapper[4784]: I1205 12:48:26.299113 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/97a264d6-629e-457a-8e0b-7d0ce2255a93-logs\") pod \"nova-metadata-0\" (UID: \"97a264d6-629e-457a-8e0b-7d0ce2255a93\") " 
pod="openstack/nova-metadata-0" Dec 05 12:48:26 crc kubenswrapper[4784]: I1205 12:48:26.299214 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-84nmt\" (UniqueName: \"kubernetes.io/projected/97a264d6-629e-457a-8e0b-7d0ce2255a93-kube-api-access-84nmt\") pod \"nova-metadata-0\" (UID: \"97a264d6-629e-457a-8e0b-7d0ce2255a93\") " pod="openstack/nova-metadata-0" Dec 05 12:48:26 crc kubenswrapper[4784]: I1205 12:48:26.299303 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97a264d6-629e-457a-8e0b-7d0ce2255a93-config-data\") pod \"nova-metadata-0\" (UID: \"97a264d6-629e-457a-8e0b-7d0ce2255a93\") " pod="openstack/nova-metadata-0" Dec 05 12:48:26 crc kubenswrapper[4784]: I1205 12:48:26.299872 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/97a264d6-629e-457a-8e0b-7d0ce2255a93-logs\") pod \"nova-metadata-0\" (UID: \"97a264d6-629e-457a-8e0b-7d0ce2255a93\") " pod="openstack/nova-metadata-0" Dec 05 12:48:26 crc kubenswrapper[4784]: I1205 12:48:26.303837 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/97a264d6-629e-457a-8e0b-7d0ce2255a93-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"97a264d6-629e-457a-8e0b-7d0ce2255a93\") " pod="openstack/nova-metadata-0" Dec 05 12:48:26 crc kubenswrapper[4784]: I1205 12:48:26.307042 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97a264d6-629e-457a-8e0b-7d0ce2255a93-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"97a264d6-629e-457a-8e0b-7d0ce2255a93\") " pod="openstack/nova-metadata-0" Dec 05 12:48:26 crc kubenswrapper[4784]: I1205 12:48:26.315999 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97a264d6-629e-457a-8e0b-7d0ce2255a93-config-data\") pod \"nova-metadata-0\" (UID: \"97a264d6-629e-457a-8e0b-7d0ce2255a93\") " pod="openstack/nova-metadata-0" Dec 05 12:48:26 crc kubenswrapper[4784]: I1205 12:48:26.316046 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-84nmt\" (UniqueName: \"kubernetes.io/projected/97a264d6-629e-457a-8e0b-7d0ce2255a93-kube-api-access-84nmt\") pod \"nova-metadata-0\" (UID: \"97a264d6-629e-457a-8e0b-7d0ce2255a93\") " pod="openstack/nova-metadata-0" Dec 05 12:48:26 crc kubenswrapper[4784]: I1205 12:48:26.342619 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 12:48:26 crc kubenswrapper[4784]: I1205 12:48:26.676456 4784 generic.go:334] "Generic (PLEG): container finished" podID="1ab7198f-769d-49fc-9b8d-cf5825a15018" containerID="0a80545960e1f69c8912f9c3b29a3e0f1d8fe17acd09ee2977e4bc984c4fa857" exitCode=0 Dec 05 12:48:26 crc kubenswrapper[4784]: I1205 12:48:26.677072 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1ab7198f-769d-49fc-9b8d-cf5825a15018","Type":"ContainerDied","Data":"0a80545960e1f69c8912f9c3b29a3e0f1d8fe17acd09ee2977e4bc984c4fa857"} Dec 05 12:48:26 crc kubenswrapper[4784]: I1205 12:48:26.803284 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 12:48:26 crc kubenswrapper[4784]: E1205 12:48:26.910928 4784 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 0a80545960e1f69c8912f9c3b29a3e0f1d8fe17acd09ee2977e4bc984c4fa857 is running failed: container process not found" containerID="0a80545960e1f69c8912f9c3b29a3e0f1d8fe17acd09ee2977e4bc984c4fa857" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 05 12:48:26 crc kubenswrapper[4784]: E1205 12:48:26.912156 4784 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 0a80545960e1f69c8912f9c3b29a3e0f1d8fe17acd09ee2977e4bc984c4fa857 is running failed: container process not found" containerID="0a80545960e1f69c8912f9c3b29a3e0f1d8fe17acd09ee2977e4bc984c4fa857" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 05 12:48:26 crc kubenswrapper[4784]: E1205 12:48:26.912580 4784 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 0a80545960e1f69c8912f9c3b29a3e0f1d8fe17acd09ee2977e4bc984c4fa857 is running failed: container process not found" containerID="0a80545960e1f69c8912f9c3b29a3e0f1d8fe17acd09ee2977e4bc984c4fa857" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 05 12:48:26 crc kubenswrapper[4784]: E1205 12:48:26.912642 4784 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 0a80545960e1f69c8912f9c3b29a3e0f1d8fe17acd09ee2977e4bc984c4fa857 is running failed: container process not found" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="1ab7198f-769d-49fc-9b8d-cf5825a15018" containerName="nova-scheduler-scheduler" Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.017758 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85898168-fe24-44f4-aa83-47064ba85960" path="/var/lib/kubelet/pods/85898168-fe24-44f4-aa83-47064ba85960/volumes" Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.134073 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-69577" Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.141300 4784 util.go:48] "No ready sandbox for pod can be found. 
Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.217209 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mscbw\" (UniqueName: \"kubernetes.io/projected/e7b90226-6cca-424b-9a49-d2f1bf8c289f-kube-api-access-mscbw\") pod \"e7b90226-6cca-424b-9a49-d2f1bf8c289f\" (UID: \"e7b90226-6cca-424b-9a49-d2f1bf8c289f\") "
Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.217321 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7b90226-6cca-424b-9a49-d2f1bf8c289f-combined-ca-bundle\") pod \"e7b90226-6cca-424b-9a49-d2f1bf8c289f\" (UID: \"e7b90226-6cca-424b-9a49-d2f1bf8c289f\") "
Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.217352 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e7b90226-6cca-424b-9a49-d2f1bf8c289f-scripts\") pod \"e7b90226-6cca-424b-9a49-d2f1bf8c289f\" (UID: \"e7b90226-6cca-424b-9a49-d2f1bf8c289f\") "
Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.217389 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7b90226-6cca-424b-9a49-d2f1bf8c289f-config-data\") pod \"e7b90226-6cca-424b-9a49-d2f1bf8c289f\" (UID: \"e7b90226-6cca-424b-9a49-d2f1bf8c289f\") "
Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.227426 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7b90226-6cca-424b-9a49-d2f1bf8c289f-scripts" (OuterVolumeSpecName: "scripts") pod "e7b90226-6cca-424b-9a49-d2f1bf8c289f" (UID: "e7b90226-6cca-424b-9a49-d2f1bf8c289f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.235553 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7b90226-6cca-424b-9a49-d2f1bf8c289f-kube-api-access-mscbw" (OuterVolumeSpecName: "kube-api-access-mscbw") pod "e7b90226-6cca-424b-9a49-d2f1bf8c289f" (UID: "e7b90226-6cca-424b-9a49-d2f1bf8c289f"). InnerVolumeSpecName "kube-api-access-mscbw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.254898 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7b90226-6cca-424b-9a49-d2f1bf8c289f-config-data" (OuterVolumeSpecName: "config-data") pod "e7b90226-6cca-424b-9a49-d2f1bf8c289f" (UID: "e7b90226-6cca-424b-9a49-d2f1bf8c289f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.268463 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7b90226-6cca-424b-9a49-d2f1bf8c289f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e7b90226-6cca-424b-9a49-d2f1bf8c289f" (UID: "e7b90226-6cca-424b-9a49-d2f1bf8c289f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.319366 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n78cq\" (UniqueName: \"kubernetes.io/projected/1ab7198f-769d-49fc-9b8d-cf5825a15018-kube-api-access-n78cq\") pod \"1ab7198f-769d-49fc-9b8d-cf5825a15018\" (UID: \"1ab7198f-769d-49fc-9b8d-cf5825a15018\") "
Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.319415 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ab7198f-769d-49fc-9b8d-cf5825a15018-combined-ca-bundle\") pod \"1ab7198f-769d-49fc-9b8d-cf5825a15018\" (UID: \"1ab7198f-769d-49fc-9b8d-cf5825a15018\") "
Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.319699 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ab7198f-769d-49fc-9b8d-cf5825a15018-config-data\") pod \"1ab7198f-769d-49fc-9b8d-cf5825a15018\" (UID: \"1ab7198f-769d-49fc-9b8d-cf5825a15018\") "
Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.320125 4784 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e7b90226-6cca-424b-9a49-d2f1bf8c289f-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.320136 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7b90226-6cca-424b-9a49-d2f1bf8c289f-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.320145 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mscbw\" (UniqueName: \"kubernetes.io/projected/e7b90226-6cca-424b-9a49-d2f1bf8c289f-kube-api-access-mscbw\") on node \"crc\" DevicePath \"\""
Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.320155 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7b90226-6cca-424b-9a49-d2f1bf8c289f-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.323289 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ab7198f-769d-49fc-9b8d-cf5825a15018-kube-api-access-n78cq" (OuterVolumeSpecName: "kube-api-access-n78cq") pod "1ab7198f-769d-49fc-9b8d-cf5825a15018" (UID: "1ab7198f-769d-49fc-9b8d-cf5825a15018"). InnerVolumeSpecName "kube-api-access-n78cq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.346101 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ab7198f-769d-49fc-9b8d-cf5825a15018-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1ab7198f-769d-49fc-9b8d-cf5825a15018" (UID: "1ab7198f-769d-49fc-9b8d-cf5825a15018"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.349424 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ab7198f-769d-49fc-9b8d-cf5825a15018-config-data" (OuterVolumeSpecName: "config-data") pod "1ab7198f-769d-49fc-9b8d-cf5825a15018" (UID: "1ab7198f-769d-49fc-9b8d-cf5825a15018"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.421710 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ab7198f-769d-49fc-9b8d-cf5825a15018-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.421758 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n78cq\" (UniqueName: \"kubernetes.io/projected/1ab7198f-769d-49fc-9b8d-cf5825a15018-kube-api-access-n78cq\") on node \"crc\" DevicePath \"\""
Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.421767 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ab7198f-769d-49fc-9b8d-cf5825a15018-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.688598 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"97a264d6-629e-457a-8e0b-7d0ce2255a93","Type":"ContainerStarted","Data":"c396f9742ecca74e1c9af0ec43004bf02ee9135899286e731b51f84064f2a725"}
Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.688642 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"97a264d6-629e-457a-8e0b-7d0ce2255a93","Type":"ContainerStarted","Data":"680adb8ce40cd0fe43342f3e9a4e3898de62fe3df2d3650a119e258822c70d34"}
Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.688654 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"97a264d6-629e-457a-8e0b-7d0ce2255a93","Type":"ContainerStarted","Data":"c75fdba268b6f9ffcdac6ee7a07a63b211908d13915d143a690151e39ffd9dff"}
Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.690601 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1ab7198f-769d-49fc-9b8d-cf5825a15018","Type":"ContainerDied","Data":"12e2a168a59b632b9b06e46f5689f095e9e8cec4d9a9d0582d77945ee09c1325"}
Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.690602 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.690638 4784 scope.go:117] "RemoveContainer" containerID="0a80545960e1f69c8912f9c3b29a3e0f1d8fe17acd09ee2977e4bc984c4fa857"
Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.696562 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-69577" event={"ID":"e7b90226-6cca-424b-9a49-d2f1bf8c289f","Type":"ContainerDied","Data":"67cab04da162255c0fecc99ad375061407d2b299fa1072729a38505117116111"}
Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.696600 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="67cab04da162255c0fecc99ad375061407d2b299fa1072729a38505117116111"
Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.696671 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-69577"
Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.756456 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.756435885 podStartE2EDuration="2.756435885s" podCreationTimestamp="2025-12-05 12:48:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:48:27.72842098 +0000 UTC m=+1387.148487795" watchObservedRunningTime="2025-12-05 12:48:27.756435885 +0000 UTC m=+1387.176502700"
Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.766797 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"]
Dec 05 12:48:27 crc kubenswrapper[4784]: E1205 12:48:27.767239 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ab7198f-769d-49fc-9b8d-cf5825a15018" containerName="nova-scheduler-scheduler"
Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.767261 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ab7198f-769d-49fc-9b8d-cf5825a15018" containerName="nova-scheduler-scheduler"
Dec 05 12:48:27 crc kubenswrapper[4784]: E1205 12:48:27.767285 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7b90226-6cca-424b-9a49-d2f1bf8c289f" containerName="nova-cell1-conductor-db-sync"
Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.767292 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7b90226-6cca-424b-9a49-d2f1bf8c289f" containerName="nova-cell1-conductor-db-sync"
Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.767474 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ab7198f-769d-49fc-9b8d-cf5825a15018" containerName="nova-scheduler-scheduler"
Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.767503 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7b90226-6cca-424b-9a49-d2f1bf8c289f" containerName="nova-cell1-conductor-db-sync"
Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.771078 4784 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.776413 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.779973 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.938052 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f743e41-0208-45ae-940c-104f0c9442ba-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"2f743e41-0208-45ae-940c-104f0c9442ba\") " pod="openstack/nova-cell1-conductor-0" Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.938266 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pdskx\" (UniqueName: \"kubernetes.io/projected/2f743e41-0208-45ae-940c-104f0c9442ba-kube-api-access-pdskx\") pod \"nova-cell1-conductor-0\" (UID: \"2f743e41-0208-45ae-940c-104f0c9442ba\") " pod="openstack/nova-cell1-conductor-0" Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.938314 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f743e41-0208-45ae-940c-104f0c9442ba-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"2f743e41-0208-45ae-940c-104f0c9442ba\") " pod="openstack/nova-cell1-conductor-0" Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.960835 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.979724 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.989895 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.991593 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 12:48:27 crc kubenswrapper[4784]: I1205 12:48:27.993736 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.005145 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.040347 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pdskx\" (UniqueName: \"kubernetes.io/projected/2f743e41-0208-45ae-940c-104f0c9442ba-kube-api-access-pdskx\") pod \"nova-cell1-conductor-0\" (UID: \"2f743e41-0208-45ae-940c-104f0c9442ba\") " pod="openstack/nova-cell1-conductor-0" Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.040382 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f743e41-0208-45ae-940c-104f0c9442ba-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"2f743e41-0208-45ae-940c-104f0c9442ba\") " pod="openstack/nova-cell1-conductor-0" Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.040487 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f743e41-0208-45ae-940c-104f0c9442ba-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"2f743e41-0208-45ae-940c-104f0c9442ba\") " pod="openstack/nova-cell1-conductor-0" Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.046627 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f743e41-0208-45ae-940c-104f0c9442ba-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"2f743e41-0208-45ae-940c-104f0c9442ba\") " pod="openstack/nova-cell1-conductor-0" Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.047378 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f743e41-0208-45ae-940c-104f0c9442ba-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"2f743e41-0208-45ae-940c-104f0c9442ba\") " pod="openstack/nova-cell1-conductor-0" Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.061892 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pdskx\" (UniqueName: \"kubernetes.io/projected/2f743e41-0208-45ae-940c-104f0c9442ba-kube-api-access-pdskx\") pod \"nova-cell1-conductor-0\" (UID: \"2f743e41-0208-45ae-940c-104f0c9442ba\") " pod="openstack/nova-cell1-conductor-0" Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.142476 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff15091d-0644-490f-968b-bbbba4cd5d99-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"ff15091d-0644-490f-968b-bbbba4cd5d99\") " pod="openstack/nova-scheduler-0" Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.142546 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff15091d-0644-490f-968b-bbbba4cd5d99-config-data\") pod \"nova-scheduler-0\" (UID: \"ff15091d-0644-490f-968b-bbbba4cd5d99\") " pod="openstack/nova-scheduler-0" Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.142646 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"kube-api-access-zll2f\" (UniqueName: \"kubernetes.io/projected/ff15091d-0644-490f-968b-bbbba4cd5d99-kube-api-access-zll2f\") pod \"nova-scheduler-0\" (UID: \"ff15091d-0644-490f-968b-bbbba4cd5d99\") " pod="openstack/nova-scheduler-0" Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.159266 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.244804 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec225a4a-0751-4625-9540-086443e2a9a7-logs\") pod \"ec225a4a-0751-4625-9540-086443e2a9a7\" (UID: \"ec225a4a-0751-4625-9540-086443e2a9a7\") " Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.245099 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tr4rk\" (UniqueName: \"kubernetes.io/projected/ec225a4a-0751-4625-9540-086443e2a9a7-kube-api-access-tr4rk\") pod \"ec225a4a-0751-4625-9540-086443e2a9a7\" (UID: \"ec225a4a-0751-4625-9540-086443e2a9a7\") " Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.245309 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec225a4a-0751-4625-9540-086443e2a9a7-config-data\") pod \"ec225a4a-0751-4625-9540-086443e2a9a7\" (UID: \"ec225a4a-0751-4625-9540-086443e2a9a7\") " Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.245483 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ec225a4a-0751-4625-9540-086443e2a9a7-logs" (OuterVolumeSpecName: "logs") pod "ec225a4a-0751-4625-9540-086443e2a9a7" (UID: "ec225a4a-0751-4625-9540-086443e2a9a7"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.245847 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec225a4a-0751-4625-9540-086443e2a9a7-combined-ca-bundle\") pod \"ec225a4a-0751-4625-9540-086443e2a9a7\" (UID: \"ec225a4a-0751-4625-9540-086443e2a9a7\") " Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.246611 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff15091d-0644-490f-968b-bbbba4cd5d99-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"ff15091d-0644-490f-968b-bbbba4cd5d99\") " pod="openstack/nova-scheduler-0" Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.246675 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff15091d-0644-490f-968b-bbbba4cd5d99-config-data\") pod \"nova-scheduler-0\" (UID: \"ff15091d-0644-490f-968b-bbbba4cd5d99\") " pod="openstack/nova-scheduler-0" Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.246906 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zll2f\" (UniqueName: \"kubernetes.io/projected/ff15091d-0644-490f-968b-bbbba4cd5d99-kube-api-access-zll2f\") pod \"nova-scheduler-0\" (UID: \"ff15091d-0644-490f-968b-bbbba4cd5d99\") " pod="openstack/nova-scheduler-0" Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.247332 4784 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec225a4a-0751-4625-9540-086443e2a9a7-logs\") on node \"crc\" DevicePath \"\"" Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.250262 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec225a4a-0751-4625-9540-086443e2a9a7-kube-api-access-tr4rk" (OuterVolumeSpecName: "kube-api-access-tr4rk") pod "ec225a4a-0751-4625-9540-086443e2a9a7" (UID: "ec225a4a-0751-4625-9540-086443e2a9a7"). InnerVolumeSpecName "kube-api-access-tr4rk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.251130 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff15091d-0644-490f-968b-bbbba4cd5d99-config-data\") pod \"nova-scheduler-0\" (UID: \"ff15091d-0644-490f-968b-bbbba4cd5d99\") " pod="openstack/nova-scheduler-0" Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.251718 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff15091d-0644-490f-968b-bbbba4cd5d99-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"ff15091d-0644-490f-968b-bbbba4cd5d99\") " pod="openstack/nova-scheduler-0" Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.253345 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.267062 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zll2f\" (UniqueName: \"kubernetes.io/projected/ff15091d-0644-490f-968b-bbbba4cd5d99-kube-api-access-zll2f\") pod \"nova-scheduler-0\" (UID: \"ff15091d-0644-490f-968b-bbbba4cd5d99\") " pod="openstack/nova-scheduler-0" Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.274684 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec225a4a-0751-4625-9540-086443e2a9a7-config-data" (OuterVolumeSpecName: "config-data") pod "ec225a4a-0751-4625-9540-086443e2a9a7" (UID: "ec225a4a-0751-4625-9540-086443e2a9a7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.285554 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec225a4a-0751-4625-9540-086443e2a9a7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ec225a4a-0751-4625-9540-086443e2a9a7" (UID: "ec225a4a-0751-4625-9540-086443e2a9a7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.316711 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.348791 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tr4rk\" (UniqueName: \"kubernetes.io/projected/ec225a4a-0751-4625-9540-086443e2a9a7-kube-api-access-tr4rk\") on node \"crc\" DevicePath \"\"" Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.348823 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec225a4a-0751-4625-9540-086443e2a9a7-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.348831 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec225a4a-0751-4625-9540-086443e2a9a7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.708663 4784 generic.go:334] "Generic (PLEG): container finished" podID="ec225a4a-0751-4625-9540-086443e2a9a7" containerID="f16bd9d5b32486cc8b7b81ed2382ec407b19bb253b6ab9928f5da9404ca63793" exitCode=0 Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.708721 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ec225a4a-0751-4625-9540-086443e2a9a7","Type":"ContainerDied","Data":"f16bd9d5b32486cc8b7b81ed2382ec407b19bb253b6ab9928f5da9404ca63793"} Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.708749 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ec225a4a-0751-4625-9540-086443e2a9a7","Type":"ContainerDied","Data":"130eaf808c9d31c4f9c3d793d4f79a238ea77af7e0564839856bd1361f481754"} Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.708768 4784 scope.go:117] "RemoveContainer" containerID="f16bd9d5b32486cc8b7b81ed2382ec407b19bb253b6ab9928f5da9404ca63793" Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.708896 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.754151 4784 scope.go:117] "RemoveContainer" containerID="38bd8b01db6a75d26a460ccbee052d43609508f7fb42398138b511acb7eac580" Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.759776 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.781208 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.786848 4784 scope.go:117] "RemoveContainer" containerID="f16bd9d5b32486cc8b7b81ed2382ec407b19bb253b6ab9928f5da9404ca63793" Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.787175 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 05 12:48:28 crc kubenswrapper[4784]: E1205 12:48:28.787408 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f16bd9d5b32486cc8b7b81ed2382ec407b19bb253b6ab9928f5da9404ca63793\": container with ID starting with f16bd9d5b32486cc8b7b81ed2382ec407b19bb253b6ab9928f5da9404ca63793 not found: ID does not exist" containerID="f16bd9d5b32486cc8b7b81ed2382ec407b19bb253b6ab9928f5da9404ca63793" Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.787456 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f16bd9d5b32486cc8b7b81ed2382ec407b19bb253b6ab9928f5da9404ca63793"} err="failed to get container status \"f16bd9d5b32486cc8b7b81ed2382ec407b19bb253b6ab9928f5da9404ca63793\": rpc error: code = NotFound desc = could not find container \"f16bd9d5b32486cc8b7b81ed2382ec407b19bb253b6ab9928f5da9404ca63793\": container with ID starting with f16bd9d5b32486cc8b7b81ed2382ec407b19bb253b6ab9928f5da9404ca63793 not found: ID does not exist" Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.787487 4784 scope.go:117] "RemoveContainer" containerID="38bd8b01db6a75d26a460ccbee052d43609508f7fb42398138b511acb7eac580" Dec 05 12:48:28 crc kubenswrapper[4784]: E1205 12:48:28.787751 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec225a4a-0751-4625-9540-086443e2a9a7" containerName="nova-api-api" Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.787777 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec225a4a-0751-4625-9540-086443e2a9a7" containerName="nova-api-api" Dec 05 12:48:28 crc kubenswrapper[4784]: E1205 12:48:28.787795 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec225a4a-0751-4625-9540-086443e2a9a7" containerName="nova-api-log" Dec 05 12:48:28 crc kubenswrapper[4784]: E1205 12:48:28.787796 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"38bd8b01db6a75d26a460ccbee052d43609508f7fb42398138b511acb7eac580\": container with ID starting with 38bd8b01db6a75d26a460ccbee052d43609508f7fb42398138b511acb7eac580 not found: ID does not exist" containerID="38bd8b01db6a75d26a460ccbee052d43609508f7fb42398138b511acb7eac580" Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.787836 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38bd8b01db6a75d26a460ccbee052d43609508f7fb42398138b511acb7eac580"} err="failed to get container status \"38bd8b01db6a75d26a460ccbee052d43609508f7fb42398138b511acb7eac580\": rpc error: code = NotFound desc = could not find container 
\"38bd8b01db6a75d26a460ccbee052d43609508f7fb42398138b511acb7eac580\": container with ID starting with 38bd8b01db6a75d26a460ccbee052d43609508f7fb42398138b511acb7eac580 not found: ID does not exist" Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.787807 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec225a4a-0751-4625-9540-086443e2a9a7" containerName="nova-api-log" Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.788310 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec225a4a-0751-4625-9540-086443e2a9a7" containerName="nova-api-api" Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.788340 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec225a4a-0751-4625-9540-086443e2a9a7" containerName="nova-api-log" Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.789724 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.792244 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.799347 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.832508 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.884725 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.970073 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d0221a3f-ba9f-4673-8ebf-fd468acbb6a6-logs\") pod \"nova-api-0\" (UID: \"d0221a3f-ba9f-4673-8ebf-fd468acbb6a6\") " pod="openstack/nova-api-0" Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.970480 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4ks7\" (UniqueName: \"kubernetes.io/projected/d0221a3f-ba9f-4673-8ebf-fd468acbb6a6-kube-api-access-c4ks7\") pod \"nova-api-0\" (UID: \"d0221a3f-ba9f-4673-8ebf-fd468acbb6a6\") " pod="openstack/nova-api-0" Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.970548 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0221a3f-ba9f-4673-8ebf-fd468acbb6a6-config-data\") pod \"nova-api-0\" (UID: \"d0221a3f-ba9f-4673-8ebf-fd468acbb6a6\") " pod="openstack/nova-api-0" Dec 05 12:48:28 crc kubenswrapper[4784]: I1205 12:48:28.970573 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0221a3f-ba9f-4673-8ebf-fd468acbb6a6-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d0221a3f-ba9f-4673-8ebf-fd468acbb6a6\") " pod="openstack/nova-api-0" Dec 05 12:48:29 crc kubenswrapper[4784]: I1205 12:48:29.011457 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1ab7198f-769d-49fc-9b8d-cf5825a15018" path="/var/lib/kubelet/pods/1ab7198f-769d-49fc-9b8d-cf5825a15018/volumes" Dec 05 12:48:29 crc kubenswrapper[4784]: I1205 12:48:29.012218 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec225a4a-0751-4625-9540-086443e2a9a7" path="/var/lib/kubelet/pods/ec225a4a-0751-4625-9540-086443e2a9a7/volumes" Dec 
05 12:48:29 crc kubenswrapper[4784]: I1205 12:48:29.072599 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d0221a3f-ba9f-4673-8ebf-fd468acbb6a6-logs\") pod \"nova-api-0\" (UID: \"d0221a3f-ba9f-4673-8ebf-fd468acbb6a6\") " pod="openstack/nova-api-0" Dec 05 12:48:29 crc kubenswrapper[4784]: I1205 12:48:29.072750 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4ks7\" (UniqueName: \"kubernetes.io/projected/d0221a3f-ba9f-4673-8ebf-fd468acbb6a6-kube-api-access-c4ks7\") pod \"nova-api-0\" (UID: \"d0221a3f-ba9f-4673-8ebf-fd468acbb6a6\") " pod="openstack/nova-api-0" Dec 05 12:48:29 crc kubenswrapper[4784]: I1205 12:48:29.072821 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0221a3f-ba9f-4673-8ebf-fd468acbb6a6-config-data\") pod \"nova-api-0\" (UID: \"d0221a3f-ba9f-4673-8ebf-fd468acbb6a6\") " pod="openstack/nova-api-0" Dec 05 12:48:29 crc kubenswrapper[4784]: I1205 12:48:29.072852 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0221a3f-ba9f-4673-8ebf-fd468acbb6a6-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d0221a3f-ba9f-4673-8ebf-fd468acbb6a6\") " pod="openstack/nova-api-0" Dec 05 12:48:29 crc kubenswrapper[4784]: I1205 12:48:29.073152 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d0221a3f-ba9f-4673-8ebf-fd468acbb6a6-logs\") pod \"nova-api-0\" (UID: \"d0221a3f-ba9f-4673-8ebf-fd468acbb6a6\") " pod="openstack/nova-api-0" Dec 05 12:48:29 crc kubenswrapper[4784]: I1205 12:48:29.080284 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0221a3f-ba9f-4673-8ebf-fd468acbb6a6-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d0221a3f-ba9f-4673-8ebf-fd468acbb6a6\") " pod="openstack/nova-api-0" Dec 05 12:48:29 crc kubenswrapper[4784]: I1205 12:48:29.081684 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0221a3f-ba9f-4673-8ebf-fd468acbb6a6-config-data\") pod \"nova-api-0\" (UID: \"d0221a3f-ba9f-4673-8ebf-fd468acbb6a6\") " pod="openstack/nova-api-0" Dec 05 12:48:29 crc kubenswrapper[4784]: I1205 12:48:29.099556 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4ks7\" (UniqueName: \"kubernetes.io/projected/d0221a3f-ba9f-4673-8ebf-fd468acbb6a6-kube-api-access-c4ks7\") pod \"nova-api-0\" (UID: \"d0221a3f-ba9f-4673-8ebf-fd468acbb6a6\") " pod="openstack/nova-api-0" Dec 05 12:48:29 crc kubenswrapper[4784]: I1205 12:48:29.116040 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 12:48:29 crc kubenswrapper[4784]: I1205 12:48:29.597992 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 12:48:29 crc kubenswrapper[4784]: I1205 12:48:29.730434 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"ff15091d-0644-490f-968b-bbbba4cd5d99","Type":"ContainerStarted","Data":"03f79a3971dd550575340b736326264267f084d0c58634f07689b55e51e503bf"} Dec 05 12:48:29 crc kubenswrapper[4784]: I1205 12:48:29.730491 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"ff15091d-0644-490f-968b-bbbba4cd5d99","Type":"ContainerStarted","Data":"b6bdec982730f5a1e683e9927843c4a9bdd4ad07c73cb5be955cc09da93f3f4d"} Dec 05 12:48:29 crc kubenswrapper[4784]: I1205 12:48:29.733380 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"2f743e41-0208-45ae-940c-104f0c9442ba","Type":"ContainerStarted","Data":"554335333ad543b9972b5b90236c57864565dbbdcf5546a7b3665206819ea411"} Dec 05 12:48:29 crc kubenswrapper[4784]: I1205 12:48:29.733415 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"2f743e41-0208-45ae-940c-104f0c9442ba","Type":"ContainerStarted","Data":"08ebefbdd099facb8420192bbde7d463a236c797bc7a1927f04141776b6eca8b"} Dec 05 12:48:29 crc kubenswrapper[4784]: I1205 12:48:29.734713 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Dec 05 12:48:29 crc kubenswrapper[4784]: I1205 12:48:29.737094 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d0221a3f-ba9f-4673-8ebf-fd468acbb6a6","Type":"ContainerStarted","Data":"c80a3619858260177d8d1faa684ca90375fb5540492cace86a80df3cb6c83428"} Dec 05 12:48:29 crc kubenswrapper[4784]: I1205 12:48:29.749042 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.749020812 podStartE2EDuration="2.749020812s" podCreationTimestamp="2025-12-05 12:48:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:48:29.746967508 +0000 UTC m=+1389.167034323" watchObservedRunningTime="2025-12-05 12:48:29.749020812 +0000 UTC m=+1389.169087627" Dec 05 12:48:29 crc kubenswrapper[4784]: I1205 12:48:29.779167 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.779120162 podStartE2EDuration="2.779120162s" podCreationTimestamp="2025-12-05 12:48:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:48:29.767159428 +0000 UTC m=+1389.187226263" watchObservedRunningTime="2025-12-05 12:48:29.779120162 +0000 UTC m=+1389.199186997" Dec 05 12:48:30 crc kubenswrapper[4784]: I1205 12:48:30.751873 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d0221a3f-ba9f-4673-8ebf-fd468acbb6a6","Type":"ContainerStarted","Data":"335cef7bf1a998fdb7b0f5da6a2a496a3ee7a35b2eb7645ebd0dfd66156ae272"} Dec 05 12:48:30 crc kubenswrapper[4784]: I1205 12:48:30.752261 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"d0221a3f-ba9f-4673-8ebf-fd468acbb6a6","Type":"ContainerStarted","Data":"4f3fcef9975a4b8974540e5775669349ecd81d29eb50b8bf1810d671b44f4c6d"} Dec 05 12:48:31 crc kubenswrapper[4784]: I1205 12:48:31.343272 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 12:48:31 crc kubenswrapper[4784]: I1205 12:48:31.343435 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 12:48:33 crc kubenswrapper[4784]: I1205 12:48:33.317741 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 05 12:48:36 crc kubenswrapper[4784]: I1205 12:48:36.343499 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 12:48:36 crc kubenswrapper[4784]: I1205 12:48:36.344137 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 12:48:37 crc kubenswrapper[4784]: I1205 12:48:37.355400 4784 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="97a264d6-629e-457a-8e0b-7d0ce2255a93" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.216:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 12:48:37 crc kubenswrapper[4784]: I1205 12:48:37.355413 4784 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="97a264d6-629e-457a-8e0b-7d0ce2255a93" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.216:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 12:48:38 crc kubenswrapper[4784]: I1205 12:48:38.300091 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Dec 05 12:48:38 crc kubenswrapper[4784]: I1205 12:48:38.317527 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=10.317507753 podStartE2EDuration="10.317507753s" podCreationTimestamp="2025-12-05 12:48:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:48:30.771484294 +0000 UTC m=+1390.191551139" watchObservedRunningTime="2025-12-05 12:48:38.317507753 +0000 UTC m=+1397.737574568" Dec 05 12:48:38 crc kubenswrapper[4784]: I1205 12:48:38.318325 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 05 12:48:38 crc kubenswrapper[4784]: I1205 12:48:38.346094 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 05 12:48:38 crc kubenswrapper[4784]: I1205 12:48:38.867172 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 05 12:48:39 crc kubenswrapper[4784]: I1205 12:48:39.117646 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 12:48:39 crc kubenswrapper[4784]: I1205 12:48:39.117955 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 12:48:40 crc kubenswrapper[4784]: I1205 12:48:40.202360 4784 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="d0221a3f-ba9f-4673-8ebf-fd468acbb6a6" containerName="nova-api-log" probeResult="failure" 
output="Get \"http://10.217.0.219:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 12:48:40 crc kubenswrapper[4784]: I1205 12:48:40.202439 4784 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="d0221a3f-ba9f-4673-8ebf-fd468acbb6a6" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.219:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 12:48:41 crc kubenswrapper[4784]: E1205 12:48:41.006904 4784 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: could not stat "/var/lib/containers/storage/overlay/79d42b58296335e1de7746ae593dc94e99430d151e83d1a936bd256aacc98642/diff" to get inode usage: stat /var/lib/containers/storage/overlay/79d42b58296335e1de7746ae593dc94e99430d151e83d1a936bd256aacc98642/diff: no such file or directory, extraDiskErr: Dec 05 12:48:41 crc kubenswrapper[4784]: I1205 12:48:41.649557 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 05 12:48:42 crc kubenswrapper[4784]: E1205 12:48:42.602481 4784 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: could not stat "/var/lib/containers/storage/overlay/26ff4093e02b6c2a48977cf757823c0af82a2d10f4340a212094d50f1c569e52/diff" to get inode usage: stat /var/lib/containers/storage/overlay/26ff4093e02b6c2a48977cf757823c0af82a2d10f4340a212094d50f1c569e52/diff: no such file or directory, extraDiskErr: could not stat "/var/log/pods/openstack_dnsmasq-dns-c688f947-l56ns_d4679e9e-a588-435e-96b5-7c2d31a6cc03/dnsmasq-dns/0.log" to get inode usage: stat /var/log/pods/openstack_dnsmasq-dns-c688f947-l56ns_d4679e9e-a588-435e-96b5-7c2d31a6cc03/dnsmasq-dns/0.log: no such file or directory Dec 05 12:48:45 crc kubenswrapper[4784]: I1205 12:48:45.163775 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 12:48:45 crc kubenswrapper[4784]: I1205 12:48:45.164873 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="28e48eb5-7362-46c6-8b58-d1d5f1a111c9" containerName="kube-state-metrics" containerID="cri-o://e94fdcb08f60d0d60f5e6fcfdcaaf6dc634b77cbd81eecc2ac2a73aef2145843" gracePeriod=30 Dec 05 12:48:45 crc kubenswrapper[4784]: I1205 12:48:45.752955 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 12:48:45 crc kubenswrapper[4784]: I1205 12:48:45.827869 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6kw8x\" (UniqueName: \"kubernetes.io/projected/28e48eb5-7362-46c6-8b58-d1d5f1a111c9-kube-api-access-6kw8x\") pod \"28e48eb5-7362-46c6-8b58-d1d5f1a111c9\" (UID: \"28e48eb5-7362-46c6-8b58-d1d5f1a111c9\") " Dec 05 12:48:45 crc kubenswrapper[4784]: I1205 12:48:45.834910 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/28e48eb5-7362-46c6-8b58-d1d5f1a111c9-kube-api-access-6kw8x" (OuterVolumeSpecName: "kube-api-access-6kw8x") pod "28e48eb5-7362-46c6-8b58-d1d5f1a111c9" (UID: "28e48eb5-7362-46c6-8b58-d1d5f1a111c9"). InnerVolumeSpecName "kube-api-access-6kw8x". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:48:45 crc kubenswrapper[4784]: I1205 12:48:45.909673 4784 generic.go:334] "Generic (PLEG): container finished" podID="28e48eb5-7362-46c6-8b58-d1d5f1a111c9" containerID="e94fdcb08f60d0d60f5e6fcfdcaaf6dc634b77cbd81eecc2ac2a73aef2145843" exitCode=2 Dec 05 12:48:45 crc kubenswrapper[4784]: I1205 12:48:45.909735 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"28e48eb5-7362-46c6-8b58-d1d5f1a111c9","Type":"ContainerDied","Data":"e94fdcb08f60d0d60f5e6fcfdcaaf6dc634b77cbd81eecc2ac2a73aef2145843"} Dec 05 12:48:45 crc kubenswrapper[4784]: I1205 12:48:45.909763 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 12:48:45 crc kubenswrapper[4784]: I1205 12:48:45.909787 4784 scope.go:117] "RemoveContainer" containerID="e94fdcb08f60d0d60f5e6fcfdcaaf6dc634b77cbd81eecc2ac2a73aef2145843" Dec 05 12:48:45 crc kubenswrapper[4784]: I1205 12:48:45.909775 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"28e48eb5-7362-46c6-8b58-d1d5f1a111c9","Type":"ContainerDied","Data":"b359d53fa8c827acd441aea65607d08da9732cf11c084ef406cb64965906373f"} Dec 05 12:48:45 crc kubenswrapper[4784]: I1205 12:48:45.946106 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6kw8x\" (UniqueName: \"kubernetes.io/projected/28e48eb5-7362-46c6-8b58-d1d5f1a111c9-kube-api-access-6kw8x\") on node \"crc\" DevicePath \"\"" Dec 05 12:48:45 crc kubenswrapper[4784]: I1205 12:48:45.952485 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 12:48:45 crc kubenswrapper[4784]: I1205 12:48:45.954975 4784 scope.go:117] "RemoveContainer" containerID="e94fdcb08f60d0d60f5e6fcfdcaaf6dc634b77cbd81eecc2ac2a73aef2145843" Dec 05 12:48:45 crc kubenswrapper[4784]: E1205 12:48:45.955506 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e94fdcb08f60d0d60f5e6fcfdcaaf6dc634b77cbd81eecc2ac2a73aef2145843\": container with ID starting with e94fdcb08f60d0d60f5e6fcfdcaaf6dc634b77cbd81eecc2ac2a73aef2145843 not found: ID does not exist" containerID="e94fdcb08f60d0d60f5e6fcfdcaaf6dc634b77cbd81eecc2ac2a73aef2145843" Dec 05 12:48:45 crc kubenswrapper[4784]: I1205 12:48:45.955547 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e94fdcb08f60d0d60f5e6fcfdcaaf6dc634b77cbd81eecc2ac2a73aef2145843"} err="failed to get container status \"e94fdcb08f60d0d60f5e6fcfdcaaf6dc634b77cbd81eecc2ac2a73aef2145843\": rpc error: code = NotFound desc = could not find container \"e94fdcb08f60d0d60f5e6fcfdcaaf6dc634b77cbd81eecc2ac2a73aef2145843\": container with ID starting with e94fdcb08f60d0d60f5e6fcfdcaaf6dc634b77cbd81eecc2ac2a73aef2145843 not found: ID does not exist" Dec 05 12:48:45 crc kubenswrapper[4784]: I1205 12:48:45.969950 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 12:48:46 crc kubenswrapper[4784]: I1205 12:48:46.048963 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 12:48:46 crc kubenswrapper[4784]: E1205 12:48:46.049532 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28e48eb5-7362-46c6-8b58-d1d5f1a111c9" containerName="kube-state-metrics" Dec 05 12:48:46 crc kubenswrapper[4784]: I1205 12:48:46.049572 4784 
state_mem.go:107] "Deleted CPUSet assignment" podUID="28e48eb5-7362-46c6-8b58-d1d5f1a111c9" containerName="kube-state-metrics" Dec 05 12:48:46 crc kubenswrapper[4784]: I1205 12:48:46.049837 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="28e48eb5-7362-46c6-8b58-d1d5f1a111c9" containerName="kube-state-metrics" Dec 05 12:48:46 crc kubenswrapper[4784]: I1205 12:48:46.050771 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 12:48:46 crc kubenswrapper[4784]: I1205 12:48:46.053295 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Dec 05 12:48:46 crc kubenswrapper[4784]: I1205 12:48:46.054469 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Dec 05 12:48:46 crc kubenswrapper[4784]: I1205 12:48:46.058532 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 12:48:46 crc kubenswrapper[4784]: I1205 12:48:46.150677 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/20314f76-fd12-4756-96b9-88485d32d3e0-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"20314f76-fd12-4756-96b9-88485d32d3e0\") " pod="openstack/kube-state-metrics-0" Dec 05 12:48:46 crc kubenswrapper[4784]: I1205 12:48:46.150791 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/20314f76-fd12-4756-96b9-88485d32d3e0-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"20314f76-fd12-4756-96b9-88485d32d3e0\") " pod="openstack/kube-state-metrics-0" Dec 05 12:48:46 crc kubenswrapper[4784]: I1205 12:48:46.151142 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mhxgh\" (UniqueName: \"kubernetes.io/projected/20314f76-fd12-4756-96b9-88485d32d3e0-kube-api-access-mhxgh\") pod \"kube-state-metrics-0\" (UID: \"20314f76-fd12-4756-96b9-88485d32d3e0\") " pod="openstack/kube-state-metrics-0" Dec 05 12:48:46 crc kubenswrapper[4784]: I1205 12:48:46.151244 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20314f76-fd12-4756-96b9-88485d32d3e0-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"20314f76-fd12-4756-96b9-88485d32d3e0\") " pod="openstack/kube-state-metrics-0" Dec 05 12:48:46 crc kubenswrapper[4784]: I1205 12:48:46.252918 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20314f76-fd12-4756-96b9-88485d32d3e0-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"20314f76-fd12-4756-96b9-88485d32d3e0\") " pod="openstack/kube-state-metrics-0" Dec 05 12:48:46 crc kubenswrapper[4784]: I1205 12:48:46.252981 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/20314f76-fd12-4756-96b9-88485d32d3e0-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"20314f76-fd12-4756-96b9-88485d32d3e0\") " pod="openstack/kube-state-metrics-0" Dec 05 12:48:46 crc kubenswrapper[4784]: I1205 12:48:46.253051 4784 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/20314f76-fd12-4756-96b9-88485d32d3e0-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"20314f76-fd12-4756-96b9-88485d32d3e0\") " pod="openstack/kube-state-metrics-0" Dec 05 12:48:46 crc kubenswrapper[4784]: I1205 12:48:46.253154 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mhxgh\" (UniqueName: \"kubernetes.io/projected/20314f76-fd12-4756-96b9-88485d32d3e0-kube-api-access-mhxgh\") pod \"kube-state-metrics-0\" (UID: \"20314f76-fd12-4756-96b9-88485d32d3e0\") " pod="openstack/kube-state-metrics-0" Dec 05 12:48:46 crc kubenswrapper[4784]: I1205 12:48:46.258273 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/20314f76-fd12-4756-96b9-88485d32d3e0-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"20314f76-fd12-4756-96b9-88485d32d3e0\") " pod="openstack/kube-state-metrics-0" Dec 05 12:48:46 crc kubenswrapper[4784]: I1205 12:48:46.258331 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/20314f76-fd12-4756-96b9-88485d32d3e0-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"20314f76-fd12-4756-96b9-88485d32d3e0\") " pod="openstack/kube-state-metrics-0" Dec 05 12:48:46 crc kubenswrapper[4784]: I1205 12:48:46.259222 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20314f76-fd12-4756-96b9-88485d32d3e0-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"20314f76-fd12-4756-96b9-88485d32d3e0\") " pod="openstack/kube-state-metrics-0" Dec 05 12:48:46 crc kubenswrapper[4784]: I1205 12:48:46.277326 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mhxgh\" (UniqueName: \"kubernetes.io/projected/20314f76-fd12-4756-96b9-88485d32d3e0-kube-api-access-mhxgh\") pod \"kube-state-metrics-0\" (UID: \"20314f76-fd12-4756-96b9-88485d32d3e0\") " pod="openstack/kube-state-metrics-0" Dec 05 12:48:46 crc kubenswrapper[4784]: I1205 12:48:46.353484 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 05 12:48:46 crc kubenswrapper[4784]: I1205 12:48:46.353624 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 05 12:48:46 crc kubenswrapper[4784]: I1205 12:48:46.358627 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 05 12:48:46 crc kubenswrapper[4784]: I1205 12:48:46.360944 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 05 12:48:46 crc kubenswrapper[4784]: I1205 12:48:46.368035 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 12:48:46 crc kubenswrapper[4784]: I1205 12:48:46.867865 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 12:48:46 crc kubenswrapper[4784]: I1205 12:48:46.922667 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"20314f76-fd12-4756-96b9-88485d32d3e0","Type":"ContainerStarted","Data":"212c11533fb4c3a6b4bd959c2a6ea6cf9817ddeca9ac8d863e23431798453682"} Dec 05 12:48:47 crc kubenswrapper[4784]: I1205 12:48:47.011454 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="28e48eb5-7362-46c6-8b58-d1d5f1a111c9" path="/var/lib/kubelet/pods/28e48eb5-7362-46c6-8b58-d1d5f1a111c9/volumes" Dec 05 12:48:47 crc kubenswrapper[4784]: I1205 12:48:47.159564 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:48:47 crc kubenswrapper[4784]: I1205 12:48:47.159865 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="bff391e4-6d41-4a89-aeb6-fe47077b3e77" containerName="ceilometer-central-agent" containerID="cri-o://dc78c3a71e913a56cb13935b20f237e953dd750cbfd33ae59a33b6f36656db34" gracePeriod=30 Dec 05 12:48:47 crc kubenswrapper[4784]: I1205 12:48:47.160021 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="bff391e4-6d41-4a89-aeb6-fe47077b3e77" containerName="proxy-httpd" containerID="cri-o://1398dfd10d2ba4915fc7d890d7adb83c27d104460ae6a362707ca218b5665a03" gracePeriod=30 Dec 05 12:48:47 crc kubenswrapper[4784]: I1205 12:48:47.160077 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="bff391e4-6d41-4a89-aeb6-fe47077b3e77" containerName="sg-core" containerID="cri-o://fcb8005fcd8e1d1d4ab6bf26b5ba897627071354e0ecee0f519975f589ce6e5d" gracePeriod=30 Dec 05 12:48:47 crc kubenswrapper[4784]: I1205 12:48:47.160128 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="bff391e4-6d41-4a89-aeb6-fe47077b3e77" containerName="ceilometer-notification-agent" containerID="cri-o://9cf1ac959726a2b0e339e1e33fabb23826b9696f96f5b8c611be596efce71c66" gracePeriod=30 Dec 05 12:48:47 crc kubenswrapper[4784]: I1205 12:48:47.935915 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"20314f76-fd12-4756-96b9-88485d32d3e0","Type":"ContainerStarted","Data":"c84ea28c2d134672e8f8485c832f54d8831daf57fc5714b0ce584d11aad7ff6d"} Dec 05 12:48:47 crc kubenswrapper[4784]: I1205 12:48:47.937214 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 05 12:48:47 crc kubenswrapper[4784]: I1205 12:48:47.940454 4784 generic.go:334] "Generic (PLEG): container finished" podID="bff391e4-6d41-4a89-aeb6-fe47077b3e77" containerID="1398dfd10d2ba4915fc7d890d7adb83c27d104460ae6a362707ca218b5665a03" exitCode=0 Dec 05 12:48:47 crc kubenswrapper[4784]: I1205 12:48:47.940496 4784 generic.go:334] "Generic (PLEG): container finished" podID="bff391e4-6d41-4a89-aeb6-fe47077b3e77" containerID="fcb8005fcd8e1d1d4ab6bf26b5ba897627071354e0ecee0f519975f589ce6e5d" exitCode=2 Dec 05 12:48:47 crc kubenswrapper[4784]: I1205 12:48:47.940508 4784 generic.go:334] "Generic (PLEG): container finished" podID="bff391e4-6d41-4a89-aeb6-fe47077b3e77" containerID="dc78c3a71e913a56cb13935b20f237e953dd750cbfd33ae59a33b6f36656db34" 
exitCode=0 Dec 05 12:48:47 crc kubenswrapper[4784]: I1205 12:48:47.940536 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bff391e4-6d41-4a89-aeb6-fe47077b3e77","Type":"ContainerDied","Data":"1398dfd10d2ba4915fc7d890d7adb83c27d104460ae6a362707ca218b5665a03"} Dec 05 12:48:47 crc kubenswrapper[4784]: I1205 12:48:47.940592 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bff391e4-6d41-4a89-aeb6-fe47077b3e77","Type":"ContainerDied","Data":"fcb8005fcd8e1d1d4ab6bf26b5ba897627071354e0ecee0f519975f589ce6e5d"} Dec 05 12:48:47 crc kubenswrapper[4784]: I1205 12:48:47.940609 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bff391e4-6d41-4a89-aeb6-fe47077b3e77","Type":"ContainerDied","Data":"dc78c3a71e913a56cb13935b20f237e953dd750cbfd33ae59a33b6f36656db34"} Dec 05 12:48:47 crc kubenswrapper[4784]: I1205 12:48:47.965895 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.563506303 podStartE2EDuration="2.965868908s" podCreationTimestamp="2025-12-05 12:48:45 +0000 UTC" firstStartedPulling="2025-12-05 12:48:46.868751446 +0000 UTC m=+1406.288818251" lastFinishedPulling="2025-12-05 12:48:47.271114041 +0000 UTC m=+1406.691180856" observedRunningTime="2025-12-05 12:48:47.957324021 +0000 UTC m=+1407.377390836" watchObservedRunningTime="2025-12-05 12:48:47.965868908 +0000 UTC m=+1407.385935733" Dec 05 12:48:48 crc kubenswrapper[4784]: W1205 12:48:48.591574 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod85898168_fe24_44f4_aa83_47064ba85960.slice/crio-44ee1c6be3f05f4d6512bb7169d828f894c4ec648197e8830557a9fb0646bd66.scope WatchSource:0}: Error finding container 44ee1c6be3f05f4d6512bb7169d828f894c4ec648197e8830557a9fb0646bd66: Status 404 returned error can't find the container with id 44ee1c6be3f05f4d6512bb7169d828f894c4ec648197e8830557a9fb0646bd66 Dec 05 12:48:48 crc kubenswrapper[4784]: E1205 12:48:48.592023 4784 manager.go:1116] Failed to create existing container: /kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podec225a4a_0751_4625_9540_086443e2a9a7.slice/crio-130eaf808c9d31c4f9c3d793d4f79a238ea77af7e0564839856bd1361f481754: Error finding container 130eaf808c9d31c4f9c3d793d4f79a238ea77af7e0564839856bd1361f481754: Status 404 returned error can't find the container with id 130eaf808c9d31c4f9c3d793d4f79a238ea77af7e0564839856bd1361f481754 Dec 05 12:48:48 crc kubenswrapper[4784]: E1205 12:48:48.592279 4784 manager.go:1116] Failed to create existing container: /kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod85898168_fe24_44f4_aa83_47064ba85960.slice/crio-b22ea7fc8e0f4f526151693ade4df30751260c26acd46cc79a71228b02ebc6b6: Error finding container b22ea7fc8e0f4f526151693ade4df30751260c26acd46cc79a71228b02ebc6b6: Status 404 returned error can't find the container with id b22ea7fc8e0f4f526151693ade4df30751260c26acd46cc79a71228b02ebc6b6 Dec 05 12:48:48 crc kubenswrapper[4784]: W1205 12:48:48.594421 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod85898168_fe24_44f4_aa83_47064ba85960.slice/crio-8fc1c1bab12e2382df45c1fa2f746524d1b6ea09c7d7ba42405ddcc093f57d70.scope WatchSource:0}: Error finding container 8fc1c1bab12e2382df45c1fa2f746524d1b6ea09c7d7ba42405ddcc093f57d70: Status 404 returned error can't find 
the container with id 8fc1c1bab12e2382df45c1fa2f746524d1b6ea09c7d7ba42405ddcc093f57d70 Dec 05 12:48:48 crc kubenswrapper[4784]: I1205 12:48:48.960248 4784 generic.go:334] "Generic (PLEG): container finished" podID="73bd63f0-a2ad-4433-b680-8628c737531c" containerID="6934a0f2f73c3ac881bf5c89e07018e75a7fd91d74c96648f990ee733eb5c4ac" exitCode=137 Dec 05 12:48:48 crc kubenswrapper[4784]: I1205 12:48:48.960416 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"73bd63f0-a2ad-4433-b680-8628c737531c","Type":"ContainerDied","Data":"6934a0f2f73c3ac881bf5c89e07018e75a7fd91d74c96648f990ee733eb5c4ac"} Dec 05 12:48:48 crc kubenswrapper[4784]: I1205 12:48:48.960625 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"73bd63f0-a2ad-4433-b680-8628c737531c","Type":"ContainerDied","Data":"79af199205285e0888228568956dd37ea494668039a2951d06a4d2e464e9c4b1"} Dec 05 12:48:48 crc kubenswrapper[4784]: I1205 12:48:48.960639 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="79af199205285e0888228568956dd37ea494668039a2951d06a4d2e464e9c4b1" Dec 05 12:48:48 crc kubenswrapper[4784]: I1205 12:48:48.976128 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:48:49 crc kubenswrapper[4784]: I1205 12:48:49.112302 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5z9kt\" (UniqueName: \"kubernetes.io/projected/73bd63f0-a2ad-4433-b680-8628c737531c-kube-api-access-5z9kt\") pod \"73bd63f0-a2ad-4433-b680-8628c737531c\" (UID: \"73bd63f0-a2ad-4433-b680-8628c737531c\") " Dec 05 12:48:49 crc kubenswrapper[4784]: I1205 12:48:49.112570 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73bd63f0-a2ad-4433-b680-8628c737531c-combined-ca-bundle\") pod \"73bd63f0-a2ad-4433-b680-8628c737531c\" (UID: \"73bd63f0-a2ad-4433-b680-8628c737531c\") " Dec 05 12:48:49 crc kubenswrapper[4784]: I1205 12:48:49.112743 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73bd63f0-a2ad-4433-b680-8628c737531c-config-data\") pod \"73bd63f0-a2ad-4433-b680-8628c737531c\" (UID: \"73bd63f0-a2ad-4433-b680-8628c737531c\") " Dec 05 12:48:49 crc kubenswrapper[4784]: I1205 12:48:49.120326 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/73bd63f0-a2ad-4433-b680-8628c737531c-kube-api-access-5z9kt" (OuterVolumeSpecName: "kube-api-access-5z9kt") pod "73bd63f0-a2ad-4433-b680-8628c737531c" (UID: "73bd63f0-a2ad-4433-b680-8628c737531c"). InnerVolumeSpecName "kube-api-access-5z9kt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:48:49 crc kubenswrapper[4784]: I1205 12:48:49.131041 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 05 12:48:49 crc kubenswrapper[4784]: I1205 12:48:49.132105 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 05 12:48:49 crc kubenswrapper[4784]: I1205 12:48:49.150089 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73bd63f0-a2ad-4433-b680-8628c737531c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "73bd63f0-a2ad-4433-b680-8628c737531c" (UID: "73bd63f0-a2ad-4433-b680-8628c737531c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:48:49 crc kubenswrapper[4784]: I1205 12:48:49.150865 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 05 12:48:49 crc kubenswrapper[4784]: I1205 12:48:49.157880 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 05 12:48:49 crc kubenswrapper[4784]: I1205 12:48:49.164231 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73bd63f0-a2ad-4433-b680-8628c737531c-config-data" (OuterVolumeSpecName: "config-data") pod "73bd63f0-a2ad-4433-b680-8628c737531c" (UID: "73bd63f0-a2ad-4433-b680-8628c737531c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:48:49 crc kubenswrapper[4784]: I1205 12:48:49.215274 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73bd63f0-a2ad-4433-b680-8628c737531c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:48:49 crc kubenswrapper[4784]: I1205 12:48:49.215312 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73bd63f0-a2ad-4433-b680-8628c737531c-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:48:49 crc kubenswrapper[4784]: I1205 12:48:49.215326 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5z9kt\" (UniqueName: \"kubernetes.io/projected/73bd63f0-a2ad-4433-b680-8628c737531c-kube-api-access-5z9kt\") on node \"crc\" DevicePath \"\"" Dec 05 12:48:49 crc kubenswrapper[4784]: I1205 12:48:49.968295 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:48:49 crc kubenswrapper[4784]: I1205 12:48:49.968537 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 05 12:48:49 crc kubenswrapper[4784]: I1205 12:48:49.976109 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.035081 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.049431 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.060856 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 12:48:50 crc kubenswrapper[4784]: E1205 12:48:50.061713 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73bd63f0-a2ad-4433-b680-8628c737531c" containerName="nova-cell1-novncproxy-novncproxy" Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.061748 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="73bd63f0-a2ad-4433-b680-8628c737531c" containerName="nova-cell1-novncproxy-novncproxy" Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.062073 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="73bd63f0-a2ad-4433-b680-8628c737531c" containerName="nova-cell1-novncproxy-novncproxy" Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.063321 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.066860 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.067422 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.067711 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.073587 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.182517 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-54fbd6cd5c-xzczx"] Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.240306 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-54fbd6cd5c-xzczx" Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.255760 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lk26t\" (UniqueName: \"kubernetes.io/projected/8b5aa1c7-bf1d-49a6-ad9a-739c5dac7550-kube-api-access-lk26t\") pod \"nova-cell1-novncproxy-0\" (UID: \"8b5aa1c7-bf1d-49a6-ad9a-739c5dac7550\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.255852 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b5aa1c7-bf1d-49a6-ad9a-739c5dac7550-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"8b5aa1c7-bf1d-49a6-ad9a-739c5dac7550\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.255906 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b5aa1c7-bf1d-49a6-ad9a-739c5dac7550-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"8b5aa1c7-bf1d-49a6-ad9a-739c5dac7550\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.255933 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b5aa1c7-bf1d-49a6-ad9a-739c5dac7550-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"8b5aa1c7-bf1d-49a6-ad9a-739c5dac7550\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.255961 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b5aa1c7-bf1d-49a6-ad9a-739c5dac7550-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"8b5aa1c7-bf1d-49a6-ad9a-739c5dac7550\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.258721 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-54fbd6cd5c-xzczx"] Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.357687 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0387d9db-dbb7-404e-991b-f67a7f02a1bf-dns-swift-storage-0\") pod \"dnsmasq-dns-54fbd6cd5c-xzczx\" (UID: \"0387d9db-dbb7-404e-991b-f67a7f02a1bf\") " pod="openstack/dnsmasq-dns-54fbd6cd5c-xzczx" Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.357829 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0387d9db-dbb7-404e-991b-f67a7f02a1bf-config\") pod \"dnsmasq-dns-54fbd6cd5c-xzczx\" (UID: \"0387d9db-dbb7-404e-991b-f67a7f02a1bf\") " pod="openstack/dnsmasq-dns-54fbd6cd5c-xzczx" Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.358002 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0387d9db-dbb7-404e-991b-f67a7f02a1bf-ovsdbserver-nb\") pod \"dnsmasq-dns-54fbd6cd5c-xzczx\" (UID: \"0387d9db-dbb7-404e-991b-f67a7f02a1bf\") " pod="openstack/dnsmasq-dns-54fbd6cd5c-xzczx" Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.358158 4784 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mdzvf\" (UniqueName: \"kubernetes.io/projected/0387d9db-dbb7-404e-991b-f67a7f02a1bf-kube-api-access-mdzvf\") pod \"dnsmasq-dns-54fbd6cd5c-xzczx\" (UID: \"0387d9db-dbb7-404e-991b-f67a7f02a1bf\") " pod="openstack/dnsmasq-dns-54fbd6cd5c-xzczx" Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.358295 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lk26t\" (UniqueName: \"kubernetes.io/projected/8b5aa1c7-bf1d-49a6-ad9a-739c5dac7550-kube-api-access-lk26t\") pod \"nova-cell1-novncproxy-0\" (UID: \"8b5aa1c7-bf1d-49a6-ad9a-739c5dac7550\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.358481 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b5aa1c7-bf1d-49a6-ad9a-739c5dac7550-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"8b5aa1c7-bf1d-49a6-ad9a-739c5dac7550\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.358570 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0387d9db-dbb7-404e-991b-f67a7f02a1bf-dns-svc\") pod \"dnsmasq-dns-54fbd6cd5c-xzczx\" (UID: \"0387d9db-dbb7-404e-991b-f67a7f02a1bf\") " pod="openstack/dnsmasq-dns-54fbd6cd5c-xzczx" Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.358625 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b5aa1c7-bf1d-49a6-ad9a-739c5dac7550-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"8b5aa1c7-bf1d-49a6-ad9a-739c5dac7550\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.358670 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b5aa1c7-bf1d-49a6-ad9a-739c5dac7550-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"8b5aa1c7-bf1d-49a6-ad9a-739c5dac7550\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.358711 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b5aa1c7-bf1d-49a6-ad9a-739c5dac7550-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"8b5aa1c7-bf1d-49a6-ad9a-739c5dac7550\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.358783 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0387d9db-dbb7-404e-991b-f67a7f02a1bf-ovsdbserver-sb\") pod \"dnsmasq-dns-54fbd6cd5c-xzczx\" (UID: \"0387d9db-dbb7-404e-991b-f67a7f02a1bf\") " pod="openstack/dnsmasq-dns-54fbd6cd5c-xzczx" Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.366460 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b5aa1c7-bf1d-49a6-ad9a-739c5dac7550-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"8b5aa1c7-bf1d-49a6-ad9a-739c5dac7550\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.366978 4784 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b5aa1c7-bf1d-49a6-ad9a-739c5dac7550-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"8b5aa1c7-bf1d-49a6-ad9a-739c5dac7550\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.373563 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b5aa1c7-bf1d-49a6-ad9a-739c5dac7550-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"8b5aa1c7-bf1d-49a6-ad9a-739c5dac7550\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.376113 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b5aa1c7-bf1d-49a6-ad9a-739c5dac7550-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"8b5aa1c7-bf1d-49a6-ad9a-739c5dac7550\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.381078 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lk26t\" (UniqueName: \"kubernetes.io/projected/8b5aa1c7-bf1d-49a6-ad9a-739c5dac7550-kube-api-access-lk26t\") pod \"nova-cell1-novncproxy-0\" (UID: \"8b5aa1c7-bf1d-49a6-ad9a-739c5dac7550\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.389176 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.461165 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0387d9db-dbb7-404e-991b-f67a7f02a1bf-dns-svc\") pod \"dnsmasq-dns-54fbd6cd5c-xzczx\" (UID: \"0387d9db-dbb7-404e-991b-f67a7f02a1bf\") " pod="openstack/dnsmasq-dns-54fbd6cd5c-xzczx" Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.461272 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0387d9db-dbb7-404e-991b-f67a7f02a1bf-ovsdbserver-sb\") pod \"dnsmasq-dns-54fbd6cd5c-xzczx\" (UID: \"0387d9db-dbb7-404e-991b-f67a7f02a1bf\") " pod="openstack/dnsmasq-dns-54fbd6cd5c-xzczx" Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.461301 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0387d9db-dbb7-404e-991b-f67a7f02a1bf-dns-swift-storage-0\") pod \"dnsmasq-dns-54fbd6cd5c-xzczx\" (UID: \"0387d9db-dbb7-404e-991b-f67a7f02a1bf\") " pod="openstack/dnsmasq-dns-54fbd6cd5c-xzczx" Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.461328 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0387d9db-dbb7-404e-991b-f67a7f02a1bf-config\") pod \"dnsmasq-dns-54fbd6cd5c-xzczx\" (UID: \"0387d9db-dbb7-404e-991b-f67a7f02a1bf\") " pod="openstack/dnsmasq-dns-54fbd6cd5c-xzczx" Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.461368 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0387d9db-dbb7-404e-991b-f67a7f02a1bf-ovsdbserver-nb\") pod \"dnsmasq-dns-54fbd6cd5c-xzczx\" (UID: \"0387d9db-dbb7-404e-991b-f67a7f02a1bf\") " pod="openstack/dnsmasq-dns-54fbd6cd5c-xzczx" Dec 
05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.461403 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mdzvf\" (UniqueName: \"kubernetes.io/projected/0387d9db-dbb7-404e-991b-f67a7f02a1bf-kube-api-access-mdzvf\") pod \"dnsmasq-dns-54fbd6cd5c-xzczx\" (UID: \"0387d9db-dbb7-404e-991b-f67a7f02a1bf\") " pod="openstack/dnsmasq-dns-54fbd6cd5c-xzczx" Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.462159 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0387d9db-dbb7-404e-991b-f67a7f02a1bf-dns-svc\") pod \"dnsmasq-dns-54fbd6cd5c-xzczx\" (UID: \"0387d9db-dbb7-404e-991b-f67a7f02a1bf\") " pod="openstack/dnsmasq-dns-54fbd6cd5c-xzczx" Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.462221 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0387d9db-dbb7-404e-991b-f67a7f02a1bf-dns-swift-storage-0\") pod \"dnsmasq-dns-54fbd6cd5c-xzczx\" (UID: \"0387d9db-dbb7-404e-991b-f67a7f02a1bf\") " pod="openstack/dnsmasq-dns-54fbd6cd5c-xzczx" Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.462403 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0387d9db-dbb7-404e-991b-f67a7f02a1bf-ovsdbserver-sb\") pod \"dnsmasq-dns-54fbd6cd5c-xzczx\" (UID: \"0387d9db-dbb7-404e-991b-f67a7f02a1bf\") " pod="openstack/dnsmasq-dns-54fbd6cd5c-xzczx" Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.462530 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0387d9db-dbb7-404e-991b-f67a7f02a1bf-config\") pod \"dnsmasq-dns-54fbd6cd5c-xzczx\" (UID: \"0387d9db-dbb7-404e-991b-f67a7f02a1bf\") " pod="openstack/dnsmasq-dns-54fbd6cd5c-xzczx" Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.462869 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0387d9db-dbb7-404e-991b-f67a7f02a1bf-ovsdbserver-nb\") pod \"dnsmasq-dns-54fbd6cd5c-xzczx\" (UID: \"0387d9db-dbb7-404e-991b-f67a7f02a1bf\") " pod="openstack/dnsmasq-dns-54fbd6cd5c-xzczx" Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.481823 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mdzvf\" (UniqueName: \"kubernetes.io/projected/0387d9db-dbb7-404e-991b-f67a7f02a1bf-kube-api-access-mdzvf\") pod \"dnsmasq-dns-54fbd6cd5c-xzczx\" (UID: \"0387d9db-dbb7-404e-991b-f67a7f02a1bf\") " pod="openstack/dnsmasq-dns-54fbd6cd5c-xzczx" Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.606482 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-54fbd6cd5c-xzczx" Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.907358 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 12:48:50 crc kubenswrapper[4784]: I1205 12:48:50.986482 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"8b5aa1c7-bf1d-49a6-ad9a-739c5dac7550","Type":"ContainerStarted","Data":"e81e97f56d389e1b664516183d0aea99226fc7ce89c1c75547e1719a37bca3e8"} Dec 05 12:48:51 crc kubenswrapper[4784]: I1205 12:48:51.020967 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="73bd63f0-a2ad-4433-b680-8628c737531c" path="/var/lib/kubelet/pods/73bd63f0-a2ad-4433-b680-8628c737531c/volumes" Dec 05 12:48:51 crc kubenswrapper[4784]: I1205 12:48:51.138231 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-54fbd6cd5c-xzczx"] Dec 05 12:48:51 crc kubenswrapper[4784]: I1205 12:48:51.994878 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"8b5aa1c7-bf1d-49a6-ad9a-739c5dac7550","Type":"ContainerStarted","Data":"d0ad7cbbfdd6450a32e4c6a6e70b7db8d5808482e8980fddd8346cdc0339950d"} Dec 05 12:48:51 crc kubenswrapper[4784]: I1205 12:48:51.998032 4784 generic.go:334] "Generic (PLEG): container finished" podID="0387d9db-dbb7-404e-991b-f67a7f02a1bf" containerID="a5c904bcbfa7920c35d530096a3d0c60bdc0c8be3fd4bffcbdb3389d990e6fec" exitCode=0 Dec 05 12:48:51 crc kubenswrapper[4784]: I1205 12:48:51.999136 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54fbd6cd5c-xzczx" event={"ID":"0387d9db-dbb7-404e-991b-f67a7f02a1bf","Type":"ContainerDied","Data":"a5c904bcbfa7920c35d530096a3d0c60bdc0c8be3fd4bffcbdb3389d990e6fec"} Dec 05 12:48:51 crc kubenswrapper[4784]: I1205 12:48:51.999173 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54fbd6cd5c-xzczx" event={"ID":"0387d9db-dbb7-404e-991b-f67a7f02a1bf","Type":"ContainerStarted","Data":"8c2e179c09a3fc0bf08d2f1257b43ad15e554a9b9edff3aec6185eebf89314f6"} Dec 05 12:48:52 crc kubenswrapper[4784]: I1205 12:48:52.060669 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.060652766 podStartE2EDuration="2.060652766s" podCreationTimestamp="2025-12-05 12:48:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:48:52.045973898 +0000 UTC m=+1411.466040713" watchObservedRunningTime="2025-12-05 12:48:52.060652766 +0000 UTC m=+1411.480719581" Dec 05 12:48:52 crc kubenswrapper[4784]: I1205 12:48:52.782057 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 12:48:53 crc kubenswrapper[4784]: I1205 12:48:53.034578 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="d0221a3f-ba9f-4673-8ebf-fd468acbb6a6" containerName="nova-api-log" containerID="cri-o://4f3fcef9975a4b8974540e5775669349ecd81d29eb50b8bf1810d671b44f4c6d" gracePeriod=30 Dec 05 12:48:53 crc kubenswrapper[4784]: I1205 12:48:53.035819 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54fbd6cd5c-xzczx" event={"ID":"0387d9db-dbb7-404e-991b-f67a7f02a1bf","Type":"ContainerStarted","Data":"c00ec124e15b8ce5e296b4f83511187a5157ce5d367105f4006dafdc8aa7cf0a"} Dec 05 12:48:53 crc kubenswrapper[4784]: 
I1205 12:48:53.035862 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-54fbd6cd5c-xzczx" Dec 05 12:48:53 crc kubenswrapper[4784]: I1205 12:48:53.036829 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="d0221a3f-ba9f-4673-8ebf-fd468acbb6a6" containerName="nova-api-api" containerID="cri-o://335cef7bf1a998fdb7b0f5da6a2a496a3ee7a35b2eb7645ebd0dfd66156ae272" gracePeriod=30 Dec 05 12:48:53 crc kubenswrapper[4784]: I1205 12:48:53.066568 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-54fbd6cd5c-xzczx" podStartSLOduration=3.066546791 podStartE2EDuration="3.066546791s" podCreationTimestamp="2025-12-05 12:48:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:48:53.063368571 +0000 UTC m=+1412.483435396" watchObservedRunningTime="2025-12-05 12:48:53.066546791 +0000 UTC m=+1412.486613606" Dec 05 12:48:54 crc kubenswrapper[4784]: I1205 12:48:54.047425 4784 generic.go:334] "Generic (PLEG): container finished" podID="d0221a3f-ba9f-4673-8ebf-fd468acbb6a6" containerID="335cef7bf1a998fdb7b0f5da6a2a496a3ee7a35b2eb7645ebd0dfd66156ae272" exitCode=0 Dec 05 12:48:54 crc kubenswrapper[4784]: I1205 12:48:54.047743 4784 generic.go:334] "Generic (PLEG): container finished" podID="d0221a3f-ba9f-4673-8ebf-fd468acbb6a6" containerID="4f3fcef9975a4b8974540e5775669349ecd81d29eb50b8bf1810d671b44f4c6d" exitCode=143 Dec 05 12:48:54 crc kubenswrapper[4784]: I1205 12:48:54.047534 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d0221a3f-ba9f-4673-8ebf-fd468acbb6a6","Type":"ContainerDied","Data":"335cef7bf1a998fdb7b0f5da6a2a496a3ee7a35b2eb7645ebd0dfd66156ae272"} Dec 05 12:48:54 crc kubenswrapper[4784]: I1205 12:48:54.048617 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d0221a3f-ba9f-4673-8ebf-fd468acbb6a6","Type":"ContainerDied","Data":"4f3fcef9975a4b8974540e5775669349ecd81d29eb50b8bf1810d671b44f4c6d"} Dec 05 12:48:54 crc kubenswrapper[4784]: I1205 12:48:54.376681 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 12:48:54 crc kubenswrapper[4784]: I1205 12:48:54.469127 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0221a3f-ba9f-4673-8ebf-fd468acbb6a6-config-data\") pod \"d0221a3f-ba9f-4673-8ebf-fd468acbb6a6\" (UID: \"d0221a3f-ba9f-4673-8ebf-fd468acbb6a6\") " Dec 05 12:48:54 crc kubenswrapper[4784]: I1205 12:48:54.469286 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0221a3f-ba9f-4673-8ebf-fd468acbb6a6-combined-ca-bundle\") pod \"d0221a3f-ba9f-4673-8ebf-fd468acbb6a6\" (UID: \"d0221a3f-ba9f-4673-8ebf-fd468acbb6a6\") " Dec 05 12:48:54 crc kubenswrapper[4784]: I1205 12:48:54.469416 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c4ks7\" (UniqueName: \"kubernetes.io/projected/d0221a3f-ba9f-4673-8ebf-fd468acbb6a6-kube-api-access-c4ks7\") pod \"d0221a3f-ba9f-4673-8ebf-fd468acbb6a6\" (UID: \"d0221a3f-ba9f-4673-8ebf-fd468acbb6a6\") " Dec 05 12:48:54 crc kubenswrapper[4784]: I1205 12:48:54.469469 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d0221a3f-ba9f-4673-8ebf-fd468acbb6a6-logs\") pod \"d0221a3f-ba9f-4673-8ebf-fd468acbb6a6\" (UID: \"d0221a3f-ba9f-4673-8ebf-fd468acbb6a6\") " Dec 05 12:48:54 crc kubenswrapper[4784]: I1205 12:48:54.470045 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d0221a3f-ba9f-4673-8ebf-fd468acbb6a6-logs" (OuterVolumeSpecName: "logs") pod "d0221a3f-ba9f-4673-8ebf-fd468acbb6a6" (UID: "d0221a3f-ba9f-4673-8ebf-fd468acbb6a6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:48:54 crc kubenswrapper[4784]: I1205 12:48:54.498170 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0221a3f-ba9f-4673-8ebf-fd468acbb6a6-kube-api-access-c4ks7" (OuterVolumeSpecName: "kube-api-access-c4ks7") pod "d0221a3f-ba9f-4673-8ebf-fd468acbb6a6" (UID: "d0221a3f-ba9f-4673-8ebf-fd468acbb6a6"). InnerVolumeSpecName "kube-api-access-c4ks7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:48:54 crc kubenswrapper[4784]: I1205 12:48:54.507303 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0221a3f-ba9f-4673-8ebf-fd468acbb6a6-config-data" (OuterVolumeSpecName: "config-data") pod "d0221a3f-ba9f-4673-8ebf-fd468acbb6a6" (UID: "d0221a3f-ba9f-4673-8ebf-fd468acbb6a6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:48:54 crc kubenswrapper[4784]: I1205 12:48:54.524537 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0221a3f-ba9f-4673-8ebf-fd468acbb6a6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d0221a3f-ba9f-4673-8ebf-fd468acbb6a6" (UID: "d0221a3f-ba9f-4673-8ebf-fd468acbb6a6"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:48:54 crc kubenswrapper[4784]: I1205 12:48:54.572255 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0221a3f-ba9f-4673-8ebf-fd468acbb6a6-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:48:54 crc kubenswrapper[4784]: I1205 12:48:54.572288 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0221a3f-ba9f-4673-8ebf-fd468acbb6a6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:48:54 crc kubenswrapper[4784]: I1205 12:48:54.572304 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c4ks7\" (UniqueName: \"kubernetes.io/projected/d0221a3f-ba9f-4673-8ebf-fd468acbb6a6-kube-api-access-c4ks7\") on node \"crc\" DevicePath \"\"" Dec 05 12:48:54 crc kubenswrapper[4784]: I1205 12:48:54.572317 4784 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d0221a3f-ba9f-4673-8ebf-fd468acbb6a6-logs\") on node \"crc\" DevicePath \"\"" Dec 05 12:48:55 crc kubenswrapper[4784]: I1205 12:48:55.059461 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d0221a3f-ba9f-4673-8ebf-fd468acbb6a6","Type":"ContainerDied","Data":"c80a3619858260177d8d1faa684ca90375fb5540492cace86a80df3cb6c83428"} Dec 05 12:48:55 crc kubenswrapper[4784]: I1205 12:48:55.059503 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 12:48:55 crc kubenswrapper[4784]: I1205 12:48:55.059524 4784 scope.go:117] "RemoveContainer" containerID="335cef7bf1a998fdb7b0f5da6a2a496a3ee7a35b2eb7645ebd0dfd66156ae272" Dec 05 12:48:55 crc kubenswrapper[4784]: I1205 12:48:55.091270 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 12:48:55 crc kubenswrapper[4784]: I1205 12:48:55.092307 4784 scope.go:117] "RemoveContainer" containerID="4f3fcef9975a4b8974540e5775669349ecd81d29eb50b8bf1810d671b44f4c6d" Dec 05 12:48:55 crc kubenswrapper[4784]: I1205 12:48:55.106639 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 05 12:48:55 crc kubenswrapper[4784]: I1205 12:48:55.129511 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 05 12:48:55 crc kubenswrapper[4784]: E1205 12:48:55.130017 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0221a3f-ba9f-4673-8ebf-fd468acbb6a6" containerName="nova-api-api" Dec 05 12:48:55 crc kubenswrapper[4784]: I1205 12:48:55.130040 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0221a3f-ba9f-4673-8ebf-fd468acbb6a6" containerName="nova-api-api" Dec 05 12:48:55 crc kubenswrapper[4784]: E1205 12:48:55.130077 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0221a3f-ba9f-4673-8ebf-fd468acbb6a6" containerName="nova-api-log" Dec 05 12:48:55 crc kubenswrapper[4784]: I1205 12:48:55.130085 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0221a3f-ba9f-4673-8ebf-fd468acbb6a6" containerName="nova-api-log" Dec 05 12:48:55 crc kubenswrapper[4784]: I1205 12:48:55.130279 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0221a3f-ba9f-4673-8ebf-fd468acbb6a6" containerName="nova-api-log" Dec 05 12:48:55 crc kubenswrapper[4784]: I1205 12:48:55.130305 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0221a3f-ba9f-4673-8ebf-fd468acbb6a6" 
containerName="nova-api-api" Dec 05 12:48:55 crc kubenswrapper[4784]: I1205 12:48:55.131440 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 12:48:55 crc kubenswrapper[4784]: I1205 12:48:55.137693 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 05 12:48:55 crc kubenswrapper[4784]: I1205 12:48:55.137862 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 05 12:48:55 crc kubenswrapper[4784]: I1205 12:48:55.137992 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 05 12:48:55 crc kubenswrapper[4784]: I1205 12:48:55.142103 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 12:48:55 crc kubenswrapper[4784]: I1205 12:48:55.183730 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ed8203f-6bb3-43d8-b034-e2ea5b285a87-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"2ed8203f-6bb3-43d8-b034-e2ea5b285a87\") " pod="openstack/nova-api-0" Dec 05 12:48:55 crc kubenswrapper[4784]: I1205 12:48:55.183928 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ed8203f-6bb3-43d8-b034-e2ea5b285a87-config-data\") pod \"nova-api-0\" (UID: \"2ed8203f-6bb3-43d8-b034-e2ea5b285a87\") " pod="openstack/nova-api-0" Dec 05 12:48:55 crc kubenswrapper[4784]: I1205 12:48:55.183969 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dtw47\" (UniqueName: \"kubernetes.io/projected/2ed8203f-6bb3-43d8-b034-e2ea5b285a87-kube-api-access-dtw47\") pod \"nova-api-0\" (UID: \"2ed8203f-6bb3-43d8-b034-e2ea5b285a87\") " pod="openstack/nova-api-0" Dec 05 12:48:55 crc kubenswrapper[4784]: I1205 12:48:55.184084 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ed8203f-6bb3-43d8-b034-e2ea5b285a87-public-tls-certs\") pod \"nova-api-0\" (UID: \"2ed8203f-6bb3-43d8-b034-e2ea5b285a87\") " pod="openstack/nova-api-0" Dec 05 12:48:55 crc kubenswrapper[4784]: I1205 12:48:55.184134 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2ed8203f-6bb3-43d8-b034-e2ea5b285a87-logs\") pod \"nova-api-0\" (UID: \"2ed8203f-6bb3-43d8-b034-e2ea5b285a87\") " pod="openstack/nova-api-0" Dec 05 12:48:55 crc kubenswrapper[4784]: I1205 12:48:55.184167 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ed8203f-6bb3-43d8-b034-e2ea5b285a87-internal-tls-certs\") pod \"nova-api-0\" (UID: \"2ed8203f-6bb3-43d8-b034-e2ea5b285a87\") " pod="openstack/nova-api-0" Dec 05 12:48:55 crc kubenswrapper[4784]: I1205 12:48:55.286233 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ed8203f-6bb3-43d8-b034-e2ea5b285a87-config-data\") pod \"nova-api-0\" (UID: \"2ed8203f-6bb3-43d8-b034-e2ea5b285a87\") " pod="openstack/nova-api-0" Dec 05 12:48:55 crc kubenswrapper[4784]: I1205 12:48:55.286280 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-dtw47\" (UniqueName: \"kubernetes.io/projected/2ed8203f-6bb3-43d8-b034-e2ea5b285a87-kube-api-access-dtw47\") pod \"nova-api-0\" (UID: \"2ed8203f-6bb3-43d8-b034-e2ea5b285a87\") " pod="openstack/nova-api-0" Dec 05 12:48:55 crc kubenswrapper[4784]: I1205 12:48:55.286347 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ed8203f-6bb3-43d8-b034-e2ea5b285a87-public-tls-certs\") pod \"nova-api-0\" (UID: \"2ed8203f-6bb3-43d8-b034-e2ea5b285a87\") " pod="openstack/nova-api-0" Dec 05 12:48:55 crc kubenswrapper[4784]: I1205 12:48:55.286366 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2ed8203f-6bb3-43d8-b034-e2ea5b285a87-logs\") pod \"nova-api-0\" (UID: \"2ed8203f-6bb3-43d8-b034-e2ea5b285a87\") " pod="openstack/nova-api-0" Dec 05 12:48:55 crc kubenswrapper[4784]: I1205 12:48:55.286385 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ed8203f-6bb3-43d8-b034-e2ea5b285a87-internal-tls-certs\") pod \"nova-api-0\" (UID: \"2ed8203f-6bb3-43d8-b034-e2ea5b285a87\") " pod="openstack/nova-api-0" Dec 05 12:48:55 crc kubenswrapper[4784]: I1205 12:48:55.286429 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ed8203f-6bb3-43d8-b034-e2ea5b285a87-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"2ed8203f-6bb3-43d8-b034-e2ea5b285a87\") " pod="openstack/nova-api-0" Dec 05 12:48:55 crc kubenswrapper[4784]: I1205 12:48:55.287663 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2ed8203f-6bb3-43d8-b034-e2ea5b285a87-logs\") pod \"nova-api-0\" (UID: \"2ed8203f-6bb3-43d8-b034-e2ea5b285a87\") " pod="openstack/nova-api-0" Dec 05 12:48:55 crc kubenswrapper[4784]: I1205 12:48:55.291981 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ed8203f-6bb3-43d8-b034-e2ea5b285a87-public-tls-certs\") pod \"nova-api-0\" (UID: \"2ed8203f-6bb3-43d8-b034-e2ea5b285a87\") " pod="openstack/nova-api-0" Dec 05 12:48:55 crc kubenswrapper[4784]: I1205 12:48:55.293247 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ed8203f-6bb3-43d8-b034-e2ea5b285a87-internal-tls-certs\") pod \"nova-api-0\" (UID: \"2ed8203f-6bb3-43d8-b034-e2ea5b285a87\") " pod="openstack/nova-api-0" Dec 05 12:48:55 crc kubenswrapper[4784]: I1205 12:48:55.294969 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ed8203f-6bb3-43d8-b034-e2ea5b285a87-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"2ed8203f-6bb3-43d8-b034-e2ea5b285a87\") " pod="openstack/nova-api-0" Dec 05 12:48:55 crc kubenswrapper[4784]: I1205 12:48:55.304884 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ed8203f-6bb3-43d8-b034-e2ea5b285a87-config-data\") pod \"nova-api-0\" (UID: \"2ed8203f-6bb3-43d8-b034-e2ea5b285a87\") " pod="openstack/nova-api-0" Dec 05 12:48:55 crc kubenswrapper[4784]: I1205 12:48:55.306487 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dtw47\" (UniqueName: 
\"kubernetes.io/projected/2ed8203f-6bb3-43d8-b034-e2ea5b285a87-kube-api-access-dtw47\") pod \"nova-api-0\" (UID: \"2ed8203f-6bb3-43d8-b034-e2ea5b285a87\") " pod="openstack/nova-api-0" Dec 05 12:48:55 crc kubenswrapper[4784]: I1205 12:48:55.389839 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:48:55 crc kubenswrapper[4784]: I1205 12:48:55.491747 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 12:48:55 crc kubenswrapper[4784]: I1205 12:48:55.980966 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 12:48:56 crc kubenswrapper[4784]: I1205 12:48:56.074485 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2ed8203f-6bb3-43d8-b034-e2ea5b285a87","Type":"ContainerStarted","Data":"391738962f869693610c0c5479fbf505fabc6cc831538a320b9199c6622e2d18"} Dec 05 12:48:56 crc kubenswrapper[4784]: I1205 12:48:56.376207 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 05 12:48:56 crc kubenswrapper[4784]: I1205 12:48:56.875551 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:48:56 crc kubenswrapper[4784]: I1205 12:48:56.928650 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bff391e4-6d41-4a89-aeb6-fe47077b3e77-run-httpd\") pod \"bff391e4-6d41-4a89-aeb6-fe47077b3e77\" (UID: \"bff391e4-6d41-4a89-aeb6-fe47077b3e77\") " Dec 05 12:48:56 crc kubenswrapper[4784]: I1205 12:48:56.928760 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bff391e4-6d41-4a89-aeb6-fe47077b3e77-config-data\") pod \"bff391e4-6d41-4a89-aeb6-fe47077b3e77\" (UID: \"bff391e4-6d41-4a89-aeb6-fe47077b3e77\") " Dec 05 12:48:56 crc kubenswrapper[4784]: I1205 12:48:56.928809 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4tmpl\" (UniqueName: \"kubernetes.io/projected/bff391e4-6d41-4a89-aeb6-fe47077b3e77-kube-api-access-4tmpl\") pod \"bff391e4-6d41-4a89-aeb6-fe47077b3e77\" (UID: \"bff391e4-6d41-4a89-aeb6-fe47077b3e77\") " Dec 05 12:48:56 crc kubenswrapper[4784]: I1205 12:48:56.928842 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bff391e4-6d41-4a89-aeb6-fe47077b3e77-combined-ca-bundle\") pod \"bff391e4-6d41-4a89-aeb6-fe47077b3e77\" (UID: \"bff391e4-6d41-4a89-aeb6-fe47077b3e77\") " Dec 05 12:48:56 crc kubenswrapper[4784]: I1205 12:48:56.928870 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bff391e4-6d41-4a89-aeb6-fe47077b3e77-log-httpd\") pod \"bff391e4-6d41-4a89-aeb6-fe47077b3e77\" (UID: \"bff391e4-6d41-4a89-aeb6-fe47077b3e77\") " Dec 05 12:48:56 crc kubenswrapper[4784]: I1205 12:48:56.928988 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bff391e4-6d41-4a89-aeb6-fe47077b3e77-sg-core-conf-yaml\") pod \"bff391e4-6d41-4a89-aeb6-fe47077b3e77\" (UID: \"bff391e4-6d41-4a89-aeb6-fe47077b3e77\") " Dec 05 12:48:56 crc kubenswrapper[4784]: I1205 12:48:56.929113 4784 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bff391e4-6d41-4a89-aeb6-fe47077b3e77-scripts\") pod \"bff391e4-6d41-4a89-aeb6-fe47077b3e77\" (UID: \"bff391e4-6d41-4a89-aeb6-fe47077b3e77\") " Dec 05 12:48:56 crc kubenswrapper[4784]: I1205 12:48:56.929572 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bff391e4-6d41-4a89-aeb6-fe47077b3e77-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "bff391e4-6d41-4a89-aeb6-fe47077b3e77" (UID: "bff391e4-6d41-4a89-aeb6-fe47077b3e77"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:48:56 crc kubenswrapper[4784]: I1205 12:48:56.929725 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bff391e4-6d41-4a89-aeb6-fe47077b3e77-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "bff391e4-6d41-4a89-aeb6-fe47077b3e77" (UID: "bff391e4-6d41-4a89-aeb6-fe47077b3e77"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:48:56 crc kubenswrapper[4784]: I1205 12:48:56.935025 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bff391e4-6d41-4a89-aeb6-fe47077b3e77-scripts" (OuterVolumeSpecName: "scripts") pod "bff391e4-6d41-4a89-aeb6-fe47077b3e77" (UID: "bff391e4-6d41-4a89-aeb6-fe47077b3e77"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:48:56 crc kubenswrapper[4784]: I1205 12:48:56.956810 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bff391e4-6d41-4a89-aeb6-fe47077b3e77-kube-api-access-4tmpl" (OuterVolumeSpecName: "kube-api-access-4tmpl") pod "bff391e4-6d41-4a89-aeb6-fe47077b3e77" (UID: "bff391e4-6d41-4a89-aeb6-fe47077b3e77"). InnerVolumeSpecName "kube-api-access-4tmpl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.006889 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bff391e4-6d41-4a89-aeb6-fe47077b3e77-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "bff391e4-6d41-4a89-aeb6-fe47077b3e77" (UID: "bff391e4-6d41-4a89-aeb6-fe47077b3e77"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.021424 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d0221a3f-ba9f-4673-8ebf-fd468acbb6a6" path="/var/lib/kubelet/pods/d0221a3f-ba9f-4673-8ebf-fd468acbb6a6/volumes" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.037259 4784 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bff391e4-6d41-4a89-aeb6-fe47077b3e77-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.037512 4784 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bff391e4-6d41-4a89-aeb6-fe47077b3e77-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.037540 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4tmpl\" (UniqueName: \"kubernetes.io/projected/bff391e4-6d41-4a89-aeb6-fe47077b3e77-kube-api-access-4tmpl\") on node \"crc\" DevicePath \"\"" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.037552 4784 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bff391e4-6d41-4a89-aeb6-fe47077b3e77-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.037564 4784 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bff391e4-6d41-4a89-aeb6-fe47077b3e77-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.067856 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bff391e4-6d41-4a89-aeb6-fe47077b3e77-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bff391e4-6d41-4a89-aeb6-fe47077b3e77" (UID: "bff391e4-6d41-4a89-aeb6-fe47077b3e77"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.089848 4784 generic.go:334] "Generic (PLEG): container finished" podID="bff391e4-6d41-4a89-aeb6-fe47077b3e77" containerID="9cf1ac959726a2b0e339e1e33fabb23826b9696f96f5b8c611be596efce71c66" exitCode=0 Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.089943 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.089950 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bff391e4-6d41-4a89-aeb6-fe47077b3e77","Type":"ContainerDied","Data":"9cf1ac959726a2b0e339e1e33fabb23826b9696f96f5b8c611be596efce71c66"} Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.090001 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bff391e4-6d41-4a89-aeb6-fe47077b3e77","Type":"ContainerDied","Data":"5e3775fa0a50e49ed01ef79aedf1c3e05afa6aa82da770a7f6439c289e6579ba"} Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.090018 4784 scope.go:117] "RemoveContainer" containerID="1398dfd10d2ba4915fc7d890d7adb83c27d104460ae6a362707ca218b5665a03" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.095314 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2ed8203f-6bb3-43d8-b034-e2ea5b285a87","Type":"ContainerStarted","Data":"49b7ff8898952860a7fcf0dc532c152f3ad041e1d5cc3fddb6ffb2deb9358894"} Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.095359 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2ed8203f-6bb3-43d8-b034-e2ea5b285a87","Type":"ContainerStarted","Data":"a70ea080880d8c0e75a8a57f7adea09c257962f15063c9b75aa86a62716e1c75"} Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.101411 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bff391e4-6d41-4a89-aeb6-fe47077b3e77-config-data" (OuterVolumeSpecName: "config-data") pod "bff391e4-6d41-4a89-aeb6-fe47077b3e77" (UID: "bff391e4-6d41-4a89-aeb6-fe47077b3e77"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.113449 4784 scope.go:117] "RemoveContainer" containerID="fcb8005fcd8e1d1d4ab6bf26b5ba897627071354e0ecee0f519975f589ce6e5d" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.123283 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.12326016 podStartE2EDuration="2.12326016s" podCreationTimestamp="2025-12-05 12:48:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:48:57.118532162 +0000 UTC m=+1416.538598977" watchObservedRunningTime="2025-12-05 12:48:57.12326016 +0000 UTC m=+1416.543326995" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.139146 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bff391e4-6d41-4a89-aeb6-fe47077b3e77-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.139197 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bff391e4-6d41-4a89-aeb6-fe47077b3e77-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.148435 4784 scope.go:117] "RemoveContainer" containerID="9cf1ac959726a2b0e339e1e33fabb23826b9696f96f5b8c611be596efce71c66" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.167789 4784 scope.go:117] "RemoveContainer" containerID="dc78c3a71e913a56cb13935b20f237e953dd750cbfd33ae59a33b6f36656db34" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.184436 4784 scope.go:117] "RemoveContainer" containerID="1398dfd10d2ba4915fc7d890d7adb83c27d104460ae6a362707ca218b5665a03" Dec 05 12:48:57 crc kubenswrapper[4784]: E1205 12:48:57.184807 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1398dfd10d2ba4915fc7d890d7adb83c27d104460ae6a362707ca218b5665a03\": container with ID starting with 1398dfd10d2ba4915fc7d890d7adb83c27d104460ae6a362707ca218b5665a03 not found: ID does not exist" containerID="1398dfd10d2ba4915fc7d890d7adb83c27d104460ae6a362707ca218b5665a03" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.184855 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1398dfd10d2ba4915fc7d890d7adb83c27d104460ae6a362707ca218b5665a03"} err="failed to get container status \"1398dfd10d2ba4915fc7d890d7adb83c27d104460ae6a362707ca218b5665a03\": rpc error: code = NotFound desc = could not find container \"1398dfd10d2ba4915fc7d890d7adb83c27d104460ae6a362707ca218b5665a03\": container with ID starting with 1398dfd10d2ba4915fc7d890d7adb83c27d104460ae6a362707ca218b5665a03 not found: ID does not exist" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.184889 4784 scope.go:117] "RemoveContainer" containerID="fcb8005fcd8e1d1d4ab6bf26b5ba897627071354e0ecee0f519975f589ce6e5d" Dec 05 12:48:57 crc kubenswrapper[4784]: E1205 12:48:57.185206 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fcb8005fcd8e1d1d4ab6bf26b5ba897627071354e0ecee0f519975f589ce6e5d\": container with ID starting with fcb8005fcd8e1d1d4ab6bf26b5ba897627071354e0ecee0f519975f589ce6e5d not found: ID does not exist" containerID="fcb8005fcd8e1d1d4ab6bf26b5ba897627071354e0ecee0f519975f589ce6e5d" 
Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.185253 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fcb8005fcd8e1d1d4ab6bf26b5ba897627071354e0ecee0f519975f589ce6e5d"} err="failed to get container status \"fcb8005fcd8e1d1d4ab6bf26b5ba897627071354e0ecee0f519975f589ce6e5d\": rpc error: code = NotFound desc = could not find container \"fcb8005fcd8e1d1d4ab6bf26b5ba897627071354e0ecee0f519975f589ce6e5d\": container with ID starting with fcb8005fcd8e1d1d4ab6bf26b5ba897627071354e0ecee0f519975f589ce6e5d not found: ID does not exist" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.185282 4784 scope.go:117] "RemoveContainer" containerID="9cf1ac959726a2b0e339e1e33fabb23826b9696f96f5b8c611be596efce71c66" Dec 05 12:48:57 crc kubenswrapper[4784]: E1205 12:48:57.185615 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9cf1ac959726a2b0e339e1e33fabb23826b9696f96f5b8c611be596efce71c66\": container with ID starting with 9cf1ac959726a2b0e339e1e33fabb23826b9696f96f5b8c611be596efce71c66 not found: ID does not exist" containerID="9cf1ac959726a2b0e339e1e33fabb23826b9696f96f5b8c611be596efce71c66" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.185673 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9cf1ac959726a2b0e339e1e33fabb23826b9696f96f5b8c611be596efce71c66"} err="failed to get container status \"9cf1ac959726a2b0e339e1e33fabb23826b9696f96f5b8c611be596efce71c66\": rpc error: code = NotFound desc = could not find container \"9cf1ac959726a2b0e339e1e33fabb23826b9696f96f5b8c611be596efce71c66\": container with ID starting with 9cf1ac959726a2b0e339e1e33fabb23826b9696f96f5b8c611be596efce71c66 not found: ID does not exist" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.185706 4784 scope.go:117] "RemoveContainer" containerID="dc78c3a71e913a56cb13935b20f237e953dd750cbfd33ae59a33b6f36656db34" Dec 05 12:48:57 crc kubenswrapper[4784]: E1205 12:48:57.186002 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc78c3a71e913a56cb13935b20f237e953dd750cbfd33ae59a33b6f36656db34\": container with ID starting with dc78c3a71e913a56cb13935b20f237e953dd750cbfd33ae59a33b6f36656db34 not found: ID does not exist" containerID="dc78c3a71e913a56cb13935b20f237e953dd750cbfd33ae59a33b6f36656db34" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.186032 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc78c3a71e913a56cb13935b20f237e953dd750cbfd33ae59a33b6f36656db34"} err="failed to get container status \"dc78c3a71e913a56cb13935b20f237e953dd750cbfd33ae59a33b6f36656db34\": rpc error: code = NotFound desc = could not find container \"dc78c3a71e913a56cb13935b20f237e953dd750cbfd33ae59a33b6f36656db34\": container with ID starting with dc78c3a71e913a56cb13935b20f237e953dd750cbfd33ae59a33b6f36656db34 not found: ID does not exist" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.430559 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.445687 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.461623 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:48:57 crc kubenswrapper[4784]: E1205 
12:48:57.462299 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bff391e4-6d41-4a89-aeb6-fe47077b3e77" containerName="ceilometer-notification-agent" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.462324 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="bff391e4-6d41-4a89-aeb6-fe47077b3e77" containerName="ceilometer-notification-agent" Dec 05 12:48:57 crc kubenswrapper[4784]: E1205 12:48:57.462348 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bff391e4-6d41-4a89-aeb6-fe47077b3e77" containerName="ceilometer-central-agent" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.462358 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="bff391e4-6d41-4a89-aeb6-fe47077b3e77" containerName="ceilometer-central-agent" Dec 05 12:48:57 crc kubenswrapper[4784]: E1205 12:48:57.462390 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bff391e4-6d41-4a89-aeb6-fe47077b3e77" containerName="proxy-httpd" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.462399 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="bff391e4-6d41-4a89-aeb6-fe47077b3e77" containerName="proxy-httpd" Dec 05 12:48:57 crc kubenswrapper[4784]: E1205 12:48:57.462426 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bff391e4-6d41-4a89-aeb6-fe47077b3e77" containerName="sg-core" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.462434 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="bff391e4-6d41-4a89-aeb6-fe47077b3e77" containerName="sg-core" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.462666 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="bff391e4-6d41-4a89-aeb6-fe47077b3e77" containerName="proxy-httpd" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.462694 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="bff391e4-6d41-4a89-aeb6-fe47077b3e77" containerName="ceilometer-central-agent" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.462717 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="bff391e4-6d41-4a89-aeb6-fe47077b3e77" containerName="sg-core" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.462729 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="bff391e4-6d41-4a89-aeb6-fe47077b3e77" containerName="ceilometer-notification-agent" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.465433 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.468410 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.468630 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.468791 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.472872 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.547344 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/435a1cb2-bd53-4b3f-906d-7fc3de9553fb-run-httpd\") pod \"ceilometer-0\" (UID: \"435a1cb2-bd53-4b3f-906d-7fc3de9553fb\") " pod="openstack/ceilometer-0" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.547402 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/435a1cb2-bd53-4b3f-906d-7fc3de9553fb-config-data\") pod \"ceilometer-0\" (UID: \"435a1cb2-bd53-4b3f-906d-7fc3de9553fb\") " pod="openstack/ceilometer-0" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.547640 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/435a1cb2-bd53-4b3f-906d-7fc3de9553fb-scripts\") pod \"ceilometer-0\" (UID: \"435a1cb2-bd53-4b3f-906d-7fc3de9553fb\") " pod="openstack/ceilometer-0" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.547868 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/435a1cb2-bd53-4b3f-906d-7fc3de9553fb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"435a1cb2-bd53-4b3f-906d-7fc3de9553fb\") " pod="openstack/ceilometer-0" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.548008 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/435a1cb2-bd53-4b3f-906d-7fc3de9553fb-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"435a1cb2-bd53-4b3f-906d-7fc3de9553fb\") " pod="openstack/ceilometer-0" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.548101 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/435a1cb2-bd53-4b3f-906d-7fc3de9553fb-log-httpd\") pod \"ceilometer-0\" (UID: \"435a1cb2-bd53-4b3f-906d-7fc3de9553fb\") " pod="openstack/ceilometer-0" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.548156 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l6g85\" (UniqueName: \"kubernetes.io/projected/435a1cb2-bd53-4b3f-906d-7fc3de9553fb-kube-api-access-l6g85\") pod \"ceilometer-0\" (UID: \"435a1cb2-bd53-4b3f-906d-7fc3de9553fb\") " pod="openstack/ceilometer-0" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.548183 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/435a1cb2-bd53-4b3f-906d-7fc3de9553fb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"435a1cb2-bd53-4b3f-906d-7fc3de9553fb\") " pod="openstack/ceilometer-0" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.649944 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/435a1cb2-bd53-4b3f-906d-7fc3de9553fb-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"435a1cb2-bd53-4b3f-906d-7fc3de9553fb\") " pod="openstack/ceilometer-0" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.650000 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/435a1cb2-bd53-4b3f-906d-7fc3de9553fb-log-httpd\") pod \"ceilometer-0\" (UID: \"435a1cb2-bd53-4b3f-906d-7fc3de9553fb\") " pod="openstack/ceilometer-0" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.650028 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6g85\" (UniqueName: \"kubernetes.io/projected/435a1cb2-bd53-4b3f-906d-7fc3de9553fb-kube-api-access-l6g85\") pod \"ceilometer-0\" (UID: \"435a1cb2-bd53-4b3f-906d-7fc3de9553fb\") " pod="openstack/ceilometer-0" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.650048 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/435a1cb2-bd53-4b3f-906d-7fc3de9553fb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"435a1cb2-bd53-4b3f-906d-7fc3de9553fb\") " pod="openstack/ceilometer-0" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.650102 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/435a1cb2-bd53-4b3f-906d-7fc3de9553fb-run-httpd\") pod \"ceilometer-0\" (UID: \"435a1cb2-bd53-4b3f-906d-7fc3de9553fb\") " pod="openstack/ceilometer-0" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.650134 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/435a1cb2-bd53-4b3f-906d-7fc3de9553fb-config-data\") pod \"ceilometer-0\" (UID: \"435a1cb2-bd53-4b3f-906d-7fc3de9553fb\") " pod="openstack/ceilometer-0" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.650214 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/435a1cb2-bd53-4b3f-906d-7fc3de9553fb-scripts\") pod \"ceilometer-0\" (UID: \"435a1cb2-bd53-4b3f-906d-7fc3de9553fb\") " pod="openstack/ceilometer-0" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.650280 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/435a1cb2-bd53-4b3f-906d-7fc3de9553fb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"435a1cb2-bd53-4b3f-906d-7fc3de9553fb\") " pod="openstack/ceilometer-0" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.650801 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/435a1cb2-bd53-4b3f-906d-7fc3de9553fb-log-httpd\") pod \"ceilometer-0\" (UID: \"435a1cb2-bd53-4b3f-906d-7fc3de9553fb\") " pod="openstack/ceilometer-0" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.650922 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/435a1cb2-bd53-4b3f-906d-7fc3de9553fb-run-httpd\") pod \"ceilometer-0\" (UID: \"435a1cb2-bd53-4b3f-906d-7fc3de9553fb\") " pod="openstack/ceilometer-0" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.656064 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/435a1cb2-bd53-4b3f-906d-7fc3de9553fb-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"435a1cb2-bd53-4b3f-906d-7fc3de9553fb\") " pod="openstack/ceilometer-0" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.656342 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/435a1cb2-bd53-4b3f-906d-7fc3de9553fb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"435a1cb2-bd53-4b3f-906d-7fc3de9553fb\") " pod="openstack/ceilometer-0" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.656351 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/435a1cb2-bd53-4b3f-906d-7fc3de9553fb-scripts\") pod \"ceilometer-0\" (UID: \"435a1cb2-bd53-4b3f-906d-7fc3de9553fb\") " pod="openstack/ceilometer-0" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.656485 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/435a1cb2-bd53-4b3f-906d-7fc3de9553fb-config-data\") pod \"ceilometer-0\" (UID: \"435a1cb2-bd53-4b3f-906d-7fc3de9553fb\") " pod="openstack/ceilometer-0" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.658156 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/435a1cb2-bd53-4b3f-906d-7fc3de9553fb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"435a1cb2-bd53-4b3f-906d-7fc3de9553fb\") " pod="openstack/ceilometer-0" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.665829 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l6g85\" (UniqueName: \"kubernetes.io/projected/435a1cb2-bd53-4b3f-906d-7fc3de9553fb-kube-api-access-l6g85\") pod \"ceilometer-0\" (UID: \"435a1cb2-bd53-4b3f-906d-7fc3de9553fb\") " pod="openstack/ceilometer-0" Dec 05 12:48:57 crc kubenswrapper[4784]: I1205 12:48:57.790700 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 12:48:58 crc kubenswrapper[4784]: I1205 12:48:58.295182 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 12:48:59 crc kubenswrapper[4784]: I1205 12:48:59.011843 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bff391e4-6d41-4a89-aeb6-fe47077b3e77" path="/var/lib/kubelet/pods/bff391e4-6d41-4a89-aeb6-fe47077b3e77/volumes" Dec 05 12:48:59 crc kubenswrapper[4784]: I1205 12:48:59.125254 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"435a1cb2-bd53-4b3f-906d-7fc3de9553fb","Type":"ContainerStarted","Data":"777a08bfcc533e258bab34217b1328f2659ee0eb77a0b3a59e372c6991c15894"} Dec 05 12:48:59 crc kubenswrapper[4784]: I1205 12:48:59.125308 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"435a1cb2-bd53-4b3f-906d-7fc3de9553fb","Type":"ContainerStarted","Data":"d717437910c93d9d2568879808ee962446e10526f12d5b81d76de7e0416ed7b8"} Dec 05 12:48:59 crc kubenswrapper[4784]: I1205 12:48:59.125323 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"435a1cb2-bd53-4b3f-906d-7fc3de9553fb","Type":"ContainerStarted","Data":"8eb88398ef7781d8396f01b5f7e51be42bdd13059a83d341d3f0386a85208d96"} Dec 05 12:49:00 crc kubenswrapper[4784]: I1205 12:49:00.150472 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"435a1cb2-bd53-4b3f-906d-7fc3de9553fb","Type":"ContainerStarted","Data":"5f92b16ea9a3a69cb30b790f4508dcf35c3a047c7335752685dd8f57f7fa743c"} Dec 05 12:49:00 crc kubenswrapper[4784]: I1205 12:49:00.389578 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:49:00 crc kubenswrapper[4784]: I1205 12:49:00.419766 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:49:00 crc kubenswrapper[4784]: I1205 12:49:00.608390 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-54fbd6cd5c-xzczx" Dec 05 12:49:00 crc kubenswrapper[4784]: I1205 12:49:00.674178 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7998f99df9-vq7lx"] Dec 05 12:49:00 crc kubenswrapper[4784]: I1205 12:49:00.674436 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7998f99df9-vq7lx" podUID="099d447c-80ed-4714-9da1-48d1a35c1079" containerName="dnsmasq-dns" containerID="cri-o://2e617565b3792a54c876383e7f6ff5c9114998fc38d0f222dbd35e548d33cbf8" gracePeriod=10 Dec 05 12:49:01 crc kubenswrapper[4784]: I1205 12:49:01.176610 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"435a1cb2-bd53-4b3f-906d-7fc3de9553fb","Type":"ContainerStarted","Data":"cfe12d12c664071d0a9e1772ffaca324796ef5dce707e17cfd8daa242183f309"} Dec 05 12:49:01 crc kubenswrapper[4784]: I1205 12:49:01.177291 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 12:49:01 crc kubenswrapper[4784]: I1205 12:49:01.185239 4784 generic.go:334] "Generic (PLEG): container finished" podID="099d447c-80ed-4714-9da1-48d1a35c1079" containerID="2e617565b3792a54c876383e7f6ff5c9114998fc38d0f222dbd35e548d33cbf8" exitCode=0 Dec 05 12:49:01 crc kubenswrapper[4784]: I1205 12:49:01.186463 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-7998f99df9-vq7lx" event={"ID":"099d447c-80ed-4714-9da1-48d1a35c1079","Type":"ContainerDied","Data":"2e617565b3792a54c876383e7f6ff5c9114998fc38d0f222dbd35e548d33cbf8"} Dec 05 12:49:01 crc kubenswrapper[4784]: I1205 12:49:01.217511 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.678237471 podStartE2EDuration="4.217485071s" podCreationTimestamp="2025-12-05 12:48:57 +0000 UTC" firstStartedPulling="2025-12-05 12:48:58.31764782 +0000 UTC m=+1417.737714645" lastFinishedPulling="2025-12-05 12:49:00.85689543 +0000 UTC m=+1420.276962245" observedRunningTime="2025-12-05 12:49:01.202495734 +0000 UTC m=+1420.622562559" watchObservedRunningTime="2025-12-05 12:49:01.217485071 +0000 UTC m=+1420.637551886" Dec 05 12:49:01 crc kubenswrapper[4784]: I1205 12:49:01.222806 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Dec 05 12:49:01 crc kubenswrapper[4784]: I1205 12:49:01.274368 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7998f99df9-vq7lx" Dec 05 12:49:01 crc kubenswrapper[4784]: I1205 12:49:01.332403 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8pt8j\" (UniqueName: \"kubernetes.io/projected/099d447c-80ed-4714-9da1-48d1a35c1079-kube-api-access-8pt8j\") pod \"099d447c-80ed-4714-9da1-48d1a35c1079\" (UID: \"099d447c-80ed-4714-9da1-48d1a35c1079\") " Dec 05 12:49:01 crc kubenswrapper[4784]: I1205 12:49:01.332542 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/099d447c-80ed-4714-9da1-48d1a35c1079-config\") pod \"099d447c-80ed-4714-9da1-48d1a35c1079\" (UID: \"099d447c-80ed-4714-9da1-48d1a35c1079\") " Dec 05 12:49:01 crc kubenswrapper[4784]: I1205 12:49:01.332872 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/099d447c-80ed-4714-9da1-48d1a35c1079-ovsdbserver-sb\") pod \"099d447c-80ed-4714-9da1-48d1a35c1079\" (UID: \"099d447c-80ed-4714-9da1-48d1a35c1079\") " Dec 05 12:49:01 crc kubenswrapper[4784]: I1205 12:49:01.332904 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/099d447c-80ed-4714-9da1-48d1a35c1079-ovsdbserver-nb\") pod \"099d447c-80ed-4714-9da1-48d1a35c1079\" (UID: \"099d447c-80ed-4714-9da1-48d1a35c1079\") " Dec 05 12:49:01 crc kubenswrapper[4784]: I1205 12:49:01.332947 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/099d447c-80ed-4714-9da1-48d1a35c1079-dns-svc\") pod \"099d447c-80ed-4714-9da1-48d1a35c1079\" (UID: \"099d447c-80ed-4714-9da1-48d1a35c1079\") " Dec 05 12:49:01 crc kubenswrapper[4784]: I1205 12:49:01.333053 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/099d447c-80ed-4714-9da1-48d1a35c1079-dns-swift-storage-0\") pod \"099d447c-80ed-4714-9da1-48d1a35c1079\" (UID: \"099d447c-80ed-4714-9da1-48d1a35c1079\") " Dec 05 12:49:01 crc kubenswrapper[4784]: I1205 12:49:01.351179 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/099d447c-80ed-4714-9da1-48d1a35c1079-kube-api-access-8pt8j" (OuterVolumeSpecName: 
"kube-api-access-8pt8j") pod "099d447c-80ed-4714-9da1-48d1a35c1079" (UID: "099d447c-80ed-4714-9da1-48d1a35c1079"). InnerVolumeSpecName "kube-api-access-8pt8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:49:01 crc kubenswrapper[4784]: I1205 12:49:01.407279 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/099d447c-80ed-4714-9da1-48d1a35c1079-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "099d447c-80ed-4714-9da1-48d1a35c1079" (UID: "099d447c-80ed-4714-9da1-48d1a35c1079"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:49:01 crc kubenswrapper[4784]: I1205 12:49:01.412609 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/099d447c-80ed-4714-9da1-48d1a35c1079-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "099d447c-80ed-4714-9da1-48d1a35c1079" (UID: "099d447c-80ed-4714-9da1-48d1a35c1079"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:49:01 crc kubenswrapper[4784]: I1205 12:49:01.415342 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/099d447c-80ed-4714-9da1-48d1a35c1079-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "099d447c-80ed-4714-9da1-48d1a35c1079" (UID: "099d447c-80ed-4714-9da1-48d1a35c1079"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:49:01 crc kubenswrapper[4784]: I1205 12:49:01.432655 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/099d447c-80ed-4714-9da1-48d1a35c1079-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "099d447c-80ed-4714-9da1-48d1a35c1079" (UID: "099d447c-80ed-4714-9da1-48d1a35c1079"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:49:01 crc kubenswrapper[4784]: I1205 12:49:01.438681 4784 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/099d447c-80ed-4714-9da1-48d1a35c1079-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:01 crc kubenswrapper[4784]: I1205 12:49:01.438726 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8pt8j\" (UniqueName: \"kubernetes.io/projected/099d447c-80ed-4714-9da1-48d1a35c1079-kube-api-access-8pt8j\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:01 crc kubenswrapper[4784]: I1205 12:49:01.438741 4784 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/099d447c-80ed-4714-9da1-48d1a35c1079-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:01 crc kubenswrapper[4784]: I1205 12:49:01.438756 4784 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/099d447c-80ed-4714-9da1-48d1a35c1079-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:01 crc kubenswrapper[4784]: I1205 12:49:01.438768 4784 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/099d447c-80ed-4714-9da1-48d1a35c1079-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:01 crc kubenswrapper[4784]: I1205 12:49:01.439849 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/099d447c-80ed-4714-9da1-48d1a35c1079-config" (OuterVolumeSpecName: "config") pod "099d447c-80ed-4714-9da1-48d1a35c1079" (UID: "099d447c-80ed-4714-9da1-48d1a35c1079"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:49:01 crc kubenswrapper[4784]: I1205 12:49:01.442922 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-lqbxc"] Dec 05 12:49:01 crc kubenswrapper[4784]: E1205 12:49:01.443408 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="099d447c-80ed-4714-9da1-48d1a35c1079" containerName="init" Dec 05 12:49:01 crc kubenswrapper[4784]: I1205 12:49:01.443430 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="099d447c-80ed-4714-9da1-48d1a35c1079" containerName="init" Dec 05 12:49:01 crc kubenswrapper[4784]: E1205 12:49:01.443461 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="099d447c-80ed-4714-9da1-48d1a35c1079" containerName="dnsmasq-dns" Dec 05 12:49:01 crc kubenswrapper[4784]: I1205 12:49:01.443469 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="099d447c-80ed-4714-9da1-48d1a35c1079" containerName="dnsmasq-dns" Dec 05 12:49:01 crc kubenswrapper[4784]: I1205 12:49:01.443699 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="099d447c-80ed-4714-9da1-48d1a35c1079" containerName="dnsmasq-dns" Dec 05 12:49:01 crc kubenswrapper[4784]: I1205 12:49:01.444468 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-lqbxc" Dec 05 12:49:01 crc kubenswrapper[4784]: I1205 12:49:01.446144 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Dec 05 12:49:01 crc kubenswrapper[4784]: I1205 12:49:01.446280 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Dec 05 12:49:01 crc kubenswrapper[4784]: I1205 12:49:01.454819 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-lqbxc"] Dec 05 12:49:01 crc kubenswrapper[4784]: I1205 12:49:01.540479 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c32bb62c-16f9-429c-a3b8-1edf96d0261c-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-lqbxc\" (UID: \"c32bb62c-16f9-429c-a3b8-1edf96d0261c\") " pod="openstack/nova-cell1-cell-mapping-lqbxc" Dec 05 12:49:01 crc kubenswrapper[4784]: I1205 12:49:01.542405 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c32bb62c-16f9-429c-a3b8-1edf96d0261c-config-data\") pod \"nova-cell1-cell-mapping-lqbxc\" (UID: \"c32bb62c-16f9-429c-a3b8-1edf96d0261c\") " pod="openstack/nova-cell1-cell-mapping-lqbxc" Dec 05 12:49:01 crc kubenswrapper[4784]: I1205 12:49:01.542437 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c32bb62c-16f9-429c-a3b8-1edf96d0261c-scripts\") pod \"nova-cell1-cell-mapping-lqbxc\" (UID: \"c32bb62c-16f9-429c-a3b8-1edf96d0261c\") " pod="openstack/nova-cell1-cell-mapping-lqbxc" Dec 05 12:49:01 crc kubenswrapper[4784]: I1205 12:49:01.542645 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5tx5\" (UniqueName: \"kubernetes.io/projected/c32bb62c-16f9-429c-a3b8-1edf96d0261c-kube-api-access-j5tx5\") pod \"nova-cell1-cell-mapping-lqbxc\" (UID: \"c32bb62c-16f9-429c-a3b8-1edf96d0261c\") " pod="openstack/nova-cell1-cell-mapping-lqbxc" Dec 05 12:49:01 crc kubenswrapper[4784]: I1205 12:49:01.542938 4784 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/099d447c-80ed-4714-9da1-48d1a35c1079-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:01 crc kubenswrapper[4784]: I1205 12:49:01.645035 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c32bb62c-16f9-429c-a3b8-1edf96d0261c-scripts\") pod \"nova-cell1-cell-mapping-lqbxc\" (UID: \"c32bb62c-16f9-429c-a3b8-1edf96d0261c\") " pod="openstack/nova-cell1-cell-mapping-lqbxc" Dec 05 12:49:01 crc kubenswrapper[4784]: I1205 12:49:01.645819 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5tx5\" (UniqueName: \"kubernetes.io/projected/c32bb62c-16f9-429c-a3b8-1edf96d0261c-kube-api-access-j5tx5\") pod \"nova-cell1-cell-mapping-lqbxc\" (UID: \"c32bb62c-16f9-429c-a3b8-1edf96d0261c\") " pod="openstack/nova-cell1-cell-mapping-lqbxc" Dec 05 12:49:01 crc kubenswrapper[4784]: I1205 12:49:01.645980 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c32bb62c-16f9-429c-a3b8-1edf96d0261c-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-lqbxc\" (UID: 
\"c32bb62c-16f9-429c-a3b8-1edf96d0261c\") " pod="openstack/nova-cell1-cell-mapping-lqbxc" Dec 05 12:49:01 crc kubenswrapper[4784]: I1205 12:49:01.646105 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c32bb62c-16f9-429c-a3b8-1edf96d0261c-config-data\") pod \"nova-cell1-cell-mapping-lqbxc\" (UID: \"c32bb62c-16f9-429c-a3b8-1edf96d0261c\") " pod="openstack/nova-cell1-cell-mapping-lqbxc" Dec 05 12:49:01 crc kubenswrapper[4784]: I1205 12:49:01.649769 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c32bb62c-16f9-429c-a3b8-1edf96d0261c-config-data\") pod \"nova-cell1-cell-mapping-lqbxc\" (UID: \"c32bb62c-16f9-429c-a3b8-1edf96d0261c\") " pod="openstack/nova-cell1-cell-mapping-lqbxc" Dec 05 12:49:01 crc kubenswrapper[4784]: I1205 12:49:01.650215 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c32bb62c-16f9-429c-a3b8-1edf96d0261c-scripts\") pod \"nova-cell1-cell-mapping-lqbxc\" (UID: \"c32bb62c-16f9-429c-a3b8-1edf96d0261c\") " pod="openstack/nova-cell1-cell-mapping-lqbxc" Dec 05 12:49:01 crc kubenswrapper[4784]: I1205 12:49:01.651114 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c32bb62c-16f9-429c-a3b8-1edf96d0261c-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-lqbxc\" (UID: \"c32bb62c-16f9-429c-a3b8-1edf96d0261c\") " pod="openstack/nova-cell1-cell-mapping-lqbxc" Dec 05 12:49:01 crc kubenswrapper[4784]: I1205 12:49:01.663848 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5tx5\" (UniqueName: \"kubernetes.io/projected/c32bb62c-16f9-429c-a3b8-1edf96d0261c-kube-api-access-j5tx5\") pod \"nova-cell1-cell-mapping-lqbxc\" (UID: \"c32bb62c-16f9-429c-a3b8-1edf96d0261c\") " pod="openstack/nova-cell1-cell-mapping-lqbxc" Dec 05 12:49:01 crc kubenswrapper[4784]: I1205 12:49:01.763642 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-lqbxc" Dec 05 12:49:02 crc kubenswrapper[4784]: I1205 12:49:02.198619 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7998f99df9-vq7lx" Dec 05 12:49:02 crc kubenswrapper[4784]: I1205 12:49:02.198909 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7998f99df9-vq7lx" event={"ID":"099d447c-80ed-4714-9da1-48d1a35c1079","Type":"ContainerDied","Data":"c060b043e24c416d0b251bf5d8e99b71bc49c5b3b570c2886c9b700f2bcf56cb"} Dec 05 12:49:02 crc kubenswrapper[4784]: I1205 12:49:02.198971 4784 scope.go:117] "RemoveContainer" containerID="2e617565b3792a54c876383e7f6ff5c9114998fc38d0f222dbd35e548d33cbf8" Dec 05 12:49:02 crc kubenswrapper[4784]: I1205 12:49:02.252529 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7998f99df9-vq7lx"] Dec 05 12:49:02 crc kubenswrapper[4784]: I1205 12:49:02.254358 4784 scope.go:117] "RemoveContainer" containerID="308d26b709675ad4489e290b7c1835596ce190919800ae6b87742129a0bae82d" Dec 05 12:49:02 crc kubenswrapper[4784]: I1205 12:49:02.267254 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7998f99df9-vq7lx"] Dec 05 12:49:02 crc kubenswrapper[4784]: I1205 12:49:02.421001 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-lqbxc"] Dec 05 12:49:02 crc kubenswrapper[4784]: W1205 12:49:02.428031 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc32bb62c_16f9_429c_a3b8_1edf96d0261c.slice/crio-45c21068179e8e1d7ab9a0f7603a082fc9bd1dd8fcb3a75622b4055f0f3ab920 WatchSource:0}: Error finding container 45c21068179e8e1d7ab9a0f7603a082fc9bd1dd8fcb3a75622b4055f0f3ab920: Status 404 returned error can't find the container with id 45c21068179e8e1d7ab9a0f7603a082fc9bd1dd8fcb3a75622b4055f0f3ab920 Dec 05 12:49:03 crc kubenswrapper[4784]: I1205 12:49:03.014040 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="099d447c-80ed-4714-9da1-48d1a35c1079" path="/var/lib/kubelet/pods/099d447c-80ed-4714-9da1-48d1a35c1079/volumes" Dec 05 12:49:03 crc kubenswrapper[4784]: I1205 12:49:03.211241 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-lqbxc" event={"ID":"c32bb62c-16f9-429c-a3b8-1edf96d0261c","Type":"ContainerStarted","Data":"4223db0146f6cfea315d03b2cbe6017284202579bbed8698ffb35e3e5fb1566c"} Dec 05 12:49:03 crc kubenswrapper[4784]: I1205 12:49:03.211285 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-lqbxc" event={"ID":"c32bb62c-16f9-429c-a3b8-1edf96d0261c","Type":"ContainerStarted","Data":"45c21068179e8e1d7ab9a0f7603a082fc9bd1dd8fcb3a75622b4055f0f3ab920"} Dec 05 12:49:03 crc kubenswrapper[4784]: I1205 12:49:03.247879 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-lqbxc" podStartSLOduration=2.247861839 podStartE2EDuration="2.247861839s" podCreationTimestamp="2025-12-05 12:49:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:49:03.232110178 +0000 UTC m=+1422.652176993" watchObservedRunningTime="2025-12-05 12:49:03.247861839 +0000 UTC m=+1422.667928654" Dec 05 12:49:05 crc kubenswrapper[4784]: I1205 12:49:05.493206 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 12:49:05 crc kubenswrapper[4784]: I1205 12:49:05.493785 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openstack/nova-api-0" Dec 05 12:49:06 crc kubenswrapper[4784]: I1205 12:49:06.505408 4784 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="2ed8203f-6bb3-43d8-b034-e2ea5b285a87" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.223:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 12:49:06 crc kubenswrapper[4784]: I1205 12:49:06.505467 4784 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="2ed8203f-6bb3-43d8-b034-e2ea5b285a87" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.223:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 12:49:08 crc kubenswrapper[4784]: I1205 12:49:08.320656 4784 generic.go:334] "Generic (PLEG): container finished" podID="c32bb62c-16f9-429c-a3b8-1edf96d0261c" containerID="4223db0146f6cfea315d03b2cbe6017284202579bbed8698ffb35e3e5fb1566c" exitCode=0 Dec 05 12:49:08 crc kubenswrapper[4784]: I1205 12:49:08.320866 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-lqbxc" event={"ID":"c32bb62c-16f9-429c-a3b8-1edf96d0261c","Type":"ContainerDied","Data":"4223db0146f6cfea315d03b2cbe6017284202579bbed8698ffb35e3e5fb1566c"} Dec 05 12:49:09 crc kubenswrapper[4784]: I1205 12:49:09.704094 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-lqbxc" Dec 05 12:49:09 crc kubenswrapper[4784]: I1205 12:49:09.816953 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c32bb62c-16f9-429c-a3b8-1edf96d0261c-config-data\") pod \"c32bb62c-16f9-429c-a3b8-1edf96d0261c\" (UID: \"c32bb62c-16f9-429c-a3b8-1edf96d0261c\") " Dec 05 12:49:09 crc kubenswrapper[4784]: I1205 12:49:09.817077 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j5tx5\" (UniqueName: \"kubernetes.io/projected/c32bb62c-16f9-429c-a3b8-1edf96d0261c-kube-api-access-j5tx5\") pod \"c32bb62c-16f9-429c-a3b8-1edf96d0261c\" (UID: \"c32bb62c-16f9-429c-a3b8-1edf96d0261c\") " Dec 05 12:49:09 crc kubenswrapper[4784]: I1205 12:49:09.817153 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c32bb62c-16f9-429c-a3b8-1edf96d0261c-combined-ca-bundle\") pod \"c32bb62c-16f9-429c-a3b8-1edf96d0261c\" (UID: \"c32bb62c-16f9-429c-a3b8-1edf96d0261c\") " Dec 05 12:49:09 crc kubenswrapper[4784]: I1205 12:49:09.817284 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c32bb62c-16f9-429c-a3b8-1edf96d0261c-scripts\") pod \"c32bb62c-16f9-429c-a3b8-1edf96d0261c\" (UID: \"c32bb62c-16f9-429c-a3b8-1edf96d0261c\") " Dec 05 12:49:09 crc kubenswrapper[4784]: I1205 12:49:09.824460 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c32bb62c-16f9-429c-a3b8-1edf96d0261c-scripts" (OuterVolumeSpecName: "scripts") pod "c32bb62c-16f9-429c-a3b8-1edf96d0261c" (UID: "c32bb62c-16f9-429c-a3b8-1edf96d0261c"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:49:09 crc kubenswrapper[4784]: I1205 12:49:09.824516 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c32bb62c-16f9-429c-a3b8-1edf96d0261c-kube-api-access-j5tx5" (OuterVolumeSpecName: "kube-api-access-j5tx5") pod "c32bb62c-16f9-429c-a3b8-1edf96d0261c" (UID: "c32bb62c-16f9-429c-a3b8-1edf96d0261c"). InnerVolumeSpecName "kube-api-access-j5tx5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:49:09 crc kubenswrapper[4784]: I1205 12:49:09.853196 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c32bb62c-16f9-429c-a3b8-1edf96d0261c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c32bb62c-16f9-429c-a3b8-1edf96d0261c" (UID: "c32bb62c-16f9-429c-a3b8-1edf96d0261c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:49:09 crc kubenswrapper[4784]: I1205 12:49:09.865863 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c32bb62c-16f9-429c-a3b8-1edf96d0261c-config-data" (OuterVolumeSpecName: "config-data") pod "c32bb62c-16f9-429c-a3b8-1edf96d0261c" (UID: "c32bb62c-16f9-429c-a3b8-1edf96d0261c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:49:09 crc kubenswrapper[4784]: I1205 12:49:09.918919 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c32bb62c-16f9-429c-a3b8-1edf96d0261c-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:09 crc kubenswrapper[4784]: I1205 12:49:09.918959 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j5tx5\" (UniqueName: \"kubernetes.io/projected/c32bb62c-16f9-429c-a3b8-1edf96d0261c-kube-api-access-j5tx5\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:09 crc kubenswrapper[4784]: I1205 12:49:09.918973 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c32bb62c-16f9-429c-a3b8-1edf96d0261c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:09 crc kubenswrapper[4784]: I1205 12:49:09.918985 4784 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c32bb62c-16f9-429c-a3b8-1edf96d0261c-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:10 crc kubenswrapper[4784]: I1205 12:49:10.339255 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-lqbxc" event={"ID":"c32bb62c-16f9-429c-a3b8-1edf96d0261c","Type":"ContainerDied","Data":"45c21068179e8e1d7ab9a0f7603a082fc9bd1dd8fcb3a75622b4055f0f3ab920"} Dec 05 12:49:10 crc kubenswrapper[4784]: I1205 12:49:10.339309 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="45c21068179e8e1d7ab9a0f7603a082fc9bd1dd8fcb3a75622b4055f0f3ab920" Dec 05 12:49:10 crc kubenswrapper[4784]: I1205 12:49:10.339341 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-lqbxc" Dec 05 12:49:10 crc kubenswrapper[4784]: I1205 12:49:10.571784 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 12:49:10 crc kubenswrapper[4784]: I1205 12:49:10.572357 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="2ed8203f-6bb3-43d8-b034-e2ea5b285a87" containerName="nova-api-log" containerID="cri-o://a70ea080880d8c0e75a8a57f7adea09c257962f15063c9b75aa86a62716e1c75" gracePeriod=30 Dec 05 12:49:10 crc kubenswrapper[4784]: I1205 12:49:10.573094 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="2ed8203f-6bb3-43d8-b034-e2ea5b285a87" containerName="nova-api-api" containerID="cri-o://49b7ff8898952860a7fcf0dc532c152f3ad041e1d5cc3fddb6ffb2deb9358894" gracePeriod=30 Dec 05 12:49:10 crc kubenswrapper[4784]: I1205 12:49:10.624750 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 12:49:10 crc kubenswrapper[4784]: I1205 12:49:10.625034 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="ff15091d-0644-490f-968b-bbbba4cd5d99" containerName="nova-scheduler-scheduler" containerID="cri-o://03f79a3971dd550575340b736326264267f084d0c58634f07689b55e51e503bf" gracePeriod=30 Dec 05 12:49:10 crc kubenswrapper[4784]: I1205 12:49:10.651065 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 12:49:10 crc kubenswrapper[4784]: I1205 12:49:10.651484 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="97a264d6-629e-457a-8e0b-7d0ce2255a93" containerName="nova-metadata-log" containerID="cri-o://680adb8ce40cd0fe43342f3e9a4e3898de62fe3df2d3650a119e258822c70d34" gracePeriod=30 Dec 05 12:49:10 crc kubenswrapper[4784]: I1205 12:49:10.651636 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="97a264d6-629e-457a-8e0b-7d0ce2255a93" containerName="nova-metadata-metadata" containerID="cri-o://c396f9742ecca74e1c9af0ec43004bf02ee9135899286e731b51f84064f2a725" gracePeriod=30 Dec 05 12:49:11 crc kubenswrapper[4784]: I1205 12:49:11.355988 4784 generic.go:334] "Generic (PLEG): container finished" podID="97a264d6-629e-457a-8e0b-7d0ce2255a93" containerID="680adb8ce40cd0fe43342f3e9a4e3898de62fe3df2d3650a119e258822c70d34" exitCode=143 Dec 05 12:49:11 crc kubenswrapper[4784]: I1205 12:49:11.356065 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"97a264d6-629e-457a-8e0b-7d0ce2255a93","Type":"ContainerDied","Data":"680adb8ce40cd0fe43342f3e9a4e3898de62fe3df2d3650a119e258822c70d34"} Dec 05 12:49:11 crc kubenswrapper[4784]: I1205 12:49:11.358320 4784 generic.go:334] "Generic (PLEG): container finished" podID="2ed8203f-6bb3-43d8-b034-e2ea5b285a87" containerID="a70ea080880d8c0e75a8a57f7adea09c257962f15063c9b75aa86a62716e1c75" exitCode=143 Dec 05 12:49:11 crc kubenswrapper[4784]: I1205 12:49:11.358360 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2ed8203f-6bb3-43d8-b034-e2ea5b285a87","Type":"ContainerDied","Data":"a70ea080880d8c0e75a8a57f7adea09c257962f15063c9b75aa86a62716e1c75"} Dec 05 12:49:11 crc kubenswrapper[4784]: I1205 12:49:11.468436 4784 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" 
podUID="97a264d6-629e-457a-8e0b-7d0ce2255a93" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.216:8775/\": read tcp 10.217.0.2:60228->10.217.0.216:8775: read: connection reset by peer" Dec 05 12:49:11 crc kubenswrapper[4784]: I1205 12:49:11.468524 4784 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="97a264d6-629e-457a-8e0b-7d0ce2255a93" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.216:8775/\": read tcp 10.217.0.2:60226->10.217.0.216:8775: read: connection reset by peer" Dec 05 12:49:11 crc kubenswrapper[4784]: I1205 12:49:11.819598 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 12:49:11 crc kubenswrapper[4784]: I1205 12:49:11.858849 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ed8203f-6bb3-43d8-b034-e2ea5b285a87-combined-ca-bundle\") pod \"2ed8203f-6bb3-43d8-b034-e2ea5b285a87\" (UID: \"2ed8203f-6bb3-43d8-b034-e2ea5b285a87\") " Dec 05 12:49:11 crc kubenswrapper[4784]: I1205 12:49:11.859114 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ed8203f-6bb3-43d8-b034-e2ea5b285a87-config-data\") pod \"2ed8203f-6bb3-43d8-b034-e2ea5b285a87\" (UID: \"2ed8203f-6bb3-43d8-b034-e2ea5b285a87\") " Dec 05 12:49:11 crc kubenswrapper[4784]: I1205 12:49:11.859297 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ed8203f-6bb3-43d8-b034-e2ea5b285a87-internal-tls-certs\") pod \"2ed8203f-6bb3-43d8-b034-e2ea5b285a87\" (UID: \"2ed8203f-6bb3-43d8-b034-e2ea5b285a87\") " Dec 05 12:49:11 crc kubenswrapper[4784]: I1205 12:49:11.859409 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dtw47\" (UniqueName: \"kubernetes.io/projected/2ed8203f-6bb3-43d8-b034-e2ea5b285a87-kube-api-access-dtw47\") pod \"2ed8203f-6bb3-43d8-b034-e2ea5b285a87\" (UID: \"2ed8203f-6bb3-43d8-b034-e2ea5b285a87\") " Dec 05 12:49:11 crc kubenswrapper[4784]: I1205 12:49:11.859614 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ed8203f-6bb3-43d8-b034-e2ea5b285a87-public-tls-certs\") pod \"2ed8203f-6bb3-43d8-b034-e2ea5b285a87\" (UID: \"2ed8203f-6bb3-43d8-b034-e2ea5b285a87\") " Dec 05 12:49:11 crc kubenswrapper[4784]: I1205 12:49:11.859704 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2ed8203f-6bb3-43d8-b034-e2ea5b285a87-logs\") pod \"2ed8203f-6bb3-43d8-b034-e2ea5b285a87\" (UID: \"2ed8203f-6bb3-43d8-b034-e2ea5b285a87\") " Dec 05 12:49:11 crc kubenswrapper[4784]: I1205 12:49:11.860321 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2ed8203f-6bb3-43d8-b034-e2ea5b285a87-logs" (OuterVolumeSpecName: "logs") pod "2ed8203f-6bb3-43d8-b034-e2ea5b285a87" (UID: "2ed8203f-6bb3-43d8-b034-e2ea5b285a87"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:49:11 crc kubenswrapper[4784]: I1205 12:49:11.885028 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ed8203f-6bb3-43d8-b034-e2ea5b285a87-kube-api-access-dtw47" (OuterVolumeSpecName: "kube-api-access-dtw47") pod "2ed8203f-6bb3-43d8-b034-e2ea5b285a87" (UID: "2ed8203f-6bb3-43d8-b034-e2ea5b285a87"). InnerVolumeSpecName "kube-api-access-dtw47". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:49:11 crc kubenswrapper[4784]: I1205 12:49:11.893352 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ed8203f-6bb3-43d8-b034-e2ea5b285a87-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2ed8203f-6bb3-43d8-b034-e2ea5b285a87" (UID: "2ed8203f-6bb3-43d8-b034-e2ea5b285a87"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:49:11 crc kubenswrapper[4784]: I1205 12:49:11.908358 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ed8203f-6bb3-43d8-b034-e2ea5b285a87-config-data" (OuterVolumeSpecName: "config-data") pod "2ed8203f-6bb3-43d8-b034-e2ea5b285a87" (UID: "2ed8203f-6bb3-43d8-b034-e2ea5b285a87"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:49:11 crc kubenswrapper[4784]: I1205 12:49:11.919240 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ed8203f-6bb3-43d8-b034-e2ea5b285a87-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "2ed8203f-6bb3-43d8-b034-e2ea5b285a87" (UID: "2ed8203f-6bb3-43d8-b034-e2ea5b285a87"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:49:11 crc kubenswrapper[4784]: I1205 12:49:11.930448 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ed8203f-6bb3-43d8-b034-e2ea5b285a87-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "2ed8203f-6bb3-43d8-b034-e2ea5b285a87" (UID: "2ed8203f-6bb3-43d8-b034-e2ea5b285a87"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:49:11 crc kubenswrapper[4784]: I1205 12:49:11.963341 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dtw47\" (UniqueName: \"kubernetes.io/projected/2ed8203f-6bb3-43d8-b034-e2ea5b285a87-kube-api-access-dtw47\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:11 crc kubenswrapper[4784]: I1205 12:49:11.963384 4784 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ed8203f-6bb3-43d8-b034-e2ea5b285a87-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:11 crc kubenswrapper[4784]: I1205 12:49:11.963396 4784 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2ed8203f-6bb3-43d8-b034-e2ea5b285a87-logs\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:11 crc kubenswrapper[4784]: I1205 12:49:11.963406 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ed8203f-6bb3-43d8-b034-e2ea5b285a87-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:11 crc kubenswrapper[4784]: I1205 12:49:11.963417 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ed8203f-6bb3-43d8-b034-e2ea5b285a87-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:11 crc kubenswrapper[4784]: I1205 12:49:11.963427 4784 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ed8203f-6bb3-43d8-b034-e2ea5b285a87-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.040923 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.067883 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/97a264d6-629e-457a-8e0b-7d0ce2255a93-nova-metadata-tls-certs\") pod \"97a264d6-629e-457a-8e0b-7d0ce2255a93\" (UID: \"97a264d6-629e-457a-8e0b-7d0ce2255a93\") " Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.068032 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97a264d6-629e-457a-8e0b-7d0ce2255a93-combined-ca-bundle\") pod \"97a264d6-629e-457a-8e0b-7d0ce2255a93\" (UID: \"97a264d6-629e-457a-8e0b-7d0ce2255a93\") " Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.068084 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97a264d6-629e-457a-8e0b-7d0ce2255a93-config-data\") pod \"97a264d6-629e-457a-8e0b-7d0ce2255a93\" (UID: \"97a264d6-629e-457a-8e0b-7d0ce2255a93\") " Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.068117 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/97a264d6-629e-457a-8e0b-7d0ce2255a93-logs\") pod \"97a264d6-629e-457a-8e0b-7d0ce2255a93\" (UID: \"97a264d6-629e-457a-8e0b-7d0ce2255a93\") " Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.068169 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-84nmt\" (UniqueName: \"kubernetes.io/projected/97a264d6-629e-457a-8e0b-7d0ce2255a93-kube-api-access-84nmt\") pod 
\"97a264d6-629e-457a-8e0b-7d0ce2255a93\" (UID: \"97a264d6-629e-457a-8e0b-7d0ce2255a93\") " Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.070402 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/97a264d6-629e-457a-8e0b-7d0ce2255a93-logs" (OuterVolumeSpecName: "logs") pod "97a264d6-629e-457a-8e0b-7d0ce2255a93" (UID: "97a264d6-629e-457a-8e0b-7d0ce2255a93"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.078053 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/97a264d6-629e-457a-8e0b-7d0ce2255a93-kube-api-access-84nmt" (OuterVolumeSpecName: "kube-api-access-84nmt") pod "97a264d6-629e-457a-8e0b-7d0ce2255a93" (UID: "97a264d6-629e-457a-8e0b-7d0ce2255a93"). InnerVolumeSpecName "kube-api-access-84nmt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.106588 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97a264d6-629e-457a-8e0b-7d0ce2255a93-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "97a264d6-629e-457a-8e0b-7d0ce2255a93" (UID: "97a264d6-629e-457a-8e0b-7d0ce2255a93"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.133423 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97a264d6-629e-457a-8e0b-7d0ce2255a93-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "97a264d6-629e-457a-8e0b-7d0ce2255a93" (UID: "97a264d6-629e-457a-8e0b-7d0ce2255a93"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.146957 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97a264d6-629e-457a-8e0b-7d0ce2255a93-config-data" (OuterVolumeSpecName: "config-data") pod "97a264d6-629e-457a-8e0b-7d0ce2255a93" (UID: "97a264d6-629e-457a-8e0b-7d0ce2255a93"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.170794 4784 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/97a264d6-629e-457a-8e0b-7d0ce2255a93-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.170858 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97a264d6-629e-457a-8e0b-7d0ce2255a93-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.170874 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97a264d6-629e-457a-8e0b-7d0ce2255a93-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.170892 4784 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/97a264d6-629e-457a-8e0b-7d0ce2255a93-logs\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.170952 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-84nmt\" (UniqueName: \"kubernetes.io/projected/97a264d6-629e-457a-8e0b-7d0ce2255a93-kube-api-access-84nmt\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.368930 4784 generic.go:334] "Generic (PLEG): container finished" podID="97a264d6-629e-457a-8e0b-7d0ce2255a93" containerID="c396f9742ecca74e1c9af0ec43004bf02ee9135899286e731b51f84064f2a725" exitCode=0 Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.368999 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"97a264d6-629e-457a-8e0b-7d0ce2255a93","Type":"ContainerDied","Data":"c396f9742ecca74e1c9af0ec43004bf02ee9135899286e731b51f84064f2a725"} Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.369012 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.369029 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"97a264d6-629e-457a-8e0b-7d0ce2255a93","Type":"ContainerDied","Data":"c75fdba268b6f9ffcdac6ee7a07a63b211908d13915d143a690151e39ffd9dff"} Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.369048 4784 scope.go:117] "RemoveContainer" containerID="c396f9742ecca74e1c9af0ec43004bf02ee9135899286e731b51f84064f2a725" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.370756 4784 generic.go:334] "Generic (PLEG): container finished" podID="2ed8203f-6bb3-43d8-b034-e2ea5b285a87" containerID="49b7ff8898952860a7fcf0dc532c152f3ad041e1d5cc3fddb6ffb2deb9358894" exitCode=0 Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.370782 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2ed8203f-6bb3-43d8-b034-e2ea5b285a87","Type":"ContainerDied","Data":"49b7ff8898952860a7fcf0dc532c152f3ad041e1d5cc3fddb6ffb2deb9358894"} Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.370797 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2ed8203f-6bb3-43d8-b034-e2ea5b285a87","Type":"ContainerDied","Data":"391738962f869693610c0c5479fbf505fabc6cc831538a320b9199c6622e2d18"} Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.370817 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.394807 4784 scope.go:117] "RemoveContainer" containerID="680adb8ce40cd0fe43342f3e9a4e3898de62fe3df2d3650a119e258822c70d34" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.420806 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.431399 4784 scope.go:117] "RemoveContainer" containerID="c396f9742ecca74e1c9af0ec43004bf02ee9135899286e731b51f84064f2a725" Dec 05 12:49:12 crc kubenswrapper[4784]: E1205 12:49:12.431868 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c396f9742ecca74e1c9af0ec43004bf02ee9135899286e731b51f84064f2a725\": container with ID starting with c396f9742ecca74e1c9af0ec43004bf02ee9135899286e731b51f84064f2a725 not found: ID does not exist" containerID="c396f9742ecca74e1c9af0ec43004bf02ee9135899286e731b51f84064f2a725" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.431923 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c396f9742ecca74e1c9af0ec43004bf02ee9135899286e731b51f84064f2a725"} err="failed to get container status \"c396f9742ecca74e1c9af0ec43004bf02ee9135899286e731b51f84064f2a725\": rpc error: code = NotFound desc = could not find container \"c396f9742ecca74e1c9af0ec43004bf02ee9135899286e731b51f84064f2a725\": container with ID starting with c396f9742ecca74e1c9af0ec43004bf02ee9135899286e731b51f84064f2a725 not found: ID does not exist" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.431956 4784 scope.go:117] "RemoveContainer" containerID="680adb8ce40cd0fe43342f3e9a4e3898de62fe3df2d3650a119e258822c70d34" Dec 05 12:49:12 crc kubenswrapper[4784]: E1205 12:49:12.432331 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"680adb8ce40cd0fe43342f3e9a4e3898de62fe3df2d3650a119e258822c70d34\": container with ID starting with 680adb8ce40cd0fe43342f3e9a4e3898de62fe3df2d3650a119e258822c70d34 not found: ID does not exist" containerID="680adb8ce40cd0fe43342f3e9a4e3898de62fe3df2d3650a119e258822c70d34" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.432398 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"680adb8ce40cd0fe43342f3e9a4e3898de62fe3df2d3650a119e258822c70d34"} err="failed to get container status \"680adb8ce40cd0fe43342f3e9a4e3898de62fe3df2d3650a119e258822c70d34\": rpc error: code = NotFound desc = could not find container \"680adb8ce40cd0fe43342f3e9a4e3898de62fe3df2d3650a119e258822c70d34\": container with ID starting with 680adb8ce40cd0fe43342f3e9a4e3898de62fe3df2d3650a119e258822c70d34 not found: ID does not exist" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.432425 4784 scope.go:117] "RemoveContainer" containerID="49b7ff8898952860a7fcf0dc532c152f3ad041e1d5cc3fddb6ffb2deb9358894" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.433614 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.451258 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.467066 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.477472 4784 scope.go:117] "RemoveContainer" containerID="a70ea080880d8c0e75a8a57f7adea09c257962f15063c9b75aa86a62716e1c75" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.477631 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 05 12:49:12 crc kubenswrapper[4784]: E1205 12:49:12.478869 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97a264d6-629e-457a-8e0b-7d0ce2255a93" containerName="nova-metadata-metadata" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.478964 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="97a264d6-629e-457a-8e0b-7d0ce2255a93" containerName="nova-metadata-metadata" Dec 05 12:49:12 crc kubenswrapper[4784]: E1205 12:49:12.478994 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ed8203f-6bb3-43d8-b034-e2ea5b285a87" containerName="nova-api-api" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.479003 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ed8203f-6bb3-43d8-b034-e2ea5b285a87" containerName="nova-api-api" Dec 05 12:49:12 crc kubenswrapper[4784]: E1205 12:49:12.479021 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c32bb62c-16f9-429c-a3b8-1edf96d0261c" containerName="nova-manage" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.479716 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="c32bb62c-16f9-429c-a3b8-1edf96d0261c" containerName="nova-manage" Dec 05 12:49:12 crc kubenswrapper[4784]: E1205 12:49:12.479768 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ed8203f-6bb3-43d8-b034-e2ea5b285a87" containerName="nova-api-log" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.479779 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ed8203f-6bb3-43d8-b034-e2ea5b285a87" containerName="nova-api-log" Dec 05 12:49:12 crc kubenswrapper[4784]: E1205 12:49:12.479803 4784 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="97a264d6-629e-457a-8e0b-7d0ce2255a93" containerName="nova-metadata-log" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.479811 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="97a264d6-629e-457a-8e0b-7d0ce2255a93" containerName="nova-metadata-log" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.481286 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="97a264d6-629e-457a-8e0b-7d0ce2255a93" containerName="nova-metadata-metadata" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.481328 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="97a264d6-629e-457a-8e0b-7d0ce2255a93" containerName="nova-metadata-log" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.481345 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ed8203f-6bb3-43d8-b034-e2ea5b285a87" containerName="nova-api-api" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.481361 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="c32bb62c-16f9-429c-a3b8-1edf96d0261c" containerName="nova-manage" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.481387 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ed8203f-6bb3-43d8-b034-e2ea5b285a87" containerName="nova-api-log" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.489696 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.490984 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.492883 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.498178 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.502669 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.504762 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.506611 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.507220 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.508332 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.515649 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.527415 4784 scope.go:117] "RemoveContainer" containerID="49b7ff8898952860a7fcf0dc532c152f3ad041e1d5cc3fddb6ffb2deb9358894" Dec 05 12:49:12 crc kubenswrapper[4784]: E1205 12:49:12.528620 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"49b7ff8898952860a7fcf0dc532c152f3ad041e1d5cc3fddb6ffb2deb9358894\": container with ID starting with 49b7ff8898952860a7fcf0dc532c152f3ad041e1d5cc3fddb6ffb2deb9358894 not found: ID does not exist" containerID="49b7ff8898952860a7fcf0dc532c152f3ad041e1d5cc3fddb6ffb2deb9358894" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.528650 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49b7ff8898952860a7fcf0dc532c152f3ad041e1d5cc3fddb6ffb2deb9358894"} err="failed to get container status \"49b7ff8898952860a7fcf0dc532c152f3ad041e1d5cc3fddb6ffb2deb9358894\": rpc error: code = NotFound desc = could not find container \"49b7ff8898952860a7fcf0dc532c152f3ad041e1d5cc3fddb6ffb2deb9358894\": container with ID starting with 49b7ff8898952860a7fcf0dc532c152f3ad041e1d5cc3fddb6ffb2deb9358894 not found: ID does not exist" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.528674 4784 scope.go:117] "RemoveContainer" containerID="a70ea080880d8c0e75a8a57f7adea09c257962f15063c9b75aa86a62716e1c75" Dec 05 12:49:12 crc kubenswrapper[4784]: E1205 12:49:12.528874 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a70ea080880d8c0e75a8a57f7adea09c257962f15063c9b75aa86a62716e1c75\": container with ID starting with a70ea080880d8c0e75a8a57f7adea09c257962f15063c9b75aa86a62716e1c75 not found: ID does not exist" containerID="a70ea080880d8c0e75a8a57f7adea09c257962f15063c9b75aa86a62716e1c75" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.528890 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a70ea080880d8c0e75a8a57f7adea09c257962f15063c9b75aa86a62716e1c75"} err="failed to get container status \"a70ea080880d8c0e75a8a57f7adea09c257962f15063c9b75aa86a62716e1c75\": rpc error: code = NotFound desc = could not find container \"a70ea080880d8c0e75a8a57f7adea09c257962f15063c9b75aa86a62716e1c75\": container with ID starting with a70ea080880d8c0e75a8a57f7adea09c257962f15063c9b75aa86a62716e1c75 not found: ID does not exist" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.579763 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b6btd\" (UniqueName: \"kubernetes.io/projected/e099c7b6-c61f-4426-a17a-ca13ca695a1e-kube-api-access-b6btd\") pod \"nova-metadata-0\" (UID: \"e099c7b6-c61f-4426-a17a-ca13ca695a1e\") " 
pod="openstack/nova-metadata-0" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.579799 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e693ebe6-ec98-4906-9a85-25a5a8a3c871-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e693ebe6-ec98-4906-9a85-25a5a8a3c871\") " pod="openstack/nova-api-0" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.580042 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l4ctl\" (UniqueName: \"kubernetes.io/projected/e693ebe6-ec98-4906-9a85-25a5a8a3c871-kube-api-access-l4ctl\") pod \"nova-api-0\" (UID: \"e693ebe6-ec98-4906-9a85-25a5a8a3c871\") " pod="openstack/nova-api-0" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.580085 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e693ebe6-ec98-4906-9a85-25a5a8a3c871-public-tls-certs\") pod \"nova-api-0\" (UID: \"e693ebe6-ec98-4906-9a85-25a5a8a3c871\") " pod="openstack/nova-api-0" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.580261 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e693ebe6-ec98-4906-9a85-25a5a8a3c871-config-data\") pod \"nova-api-0\" (UID: \"e693ebe6-ec98-4906-9a85-25a5a8a3c871\") " pod="openstack/nova-api-0" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.580324 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e099c7b6-c61f-4426-a17a-ca13ca695a1e-logs\") pod \"nova-metadata-0\" (UID: \"e099c7b6-c61f-4426-a17a-ca13ca695a1e\") " pod="openstack/nova-metadata-0" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.580408 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/e099c7b6-c61f-4426-a17a-ca13ca695a1e-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"e099c7b6-c61f-4426-a17a-ca13ca695a1e\") " pod="openstack/nova-metadata-0" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.580474 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e099c7b6-c61f-4426-a17a-ca13ca695a1e-config-data\") pod \"nova-metadata-0\" (UID: \"e099c7b6-c61f-4426-a17a-ca13ca695a1e\") " pod="openstack/nova-metadata-0" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.580529 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e693ebe6-ec98-4906-9a85-25a5a8a3c871-internal-tls-certs\") pod \"nova-api-0\" (UID: \"e693ebe6-ec98-4906-9a85-25a5a8a3c871\") " pod="openstack/nova-api-0" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.580578 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e693ebe6-ec98-4906-9a85-25a5a8a3c871-logs\") pod \"nova-api-0\" (UID: \"e693ebe6-ec98-4906-9a85-25a5a8a3c871\") " pod="openstack/nova-api-0" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.580619 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e099c7b6-c61f-4426-a17a-ca13ca695a1e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e099c7b6-c61f-4426-a17a-ca13ca695a1e\") " pod="openstack/nova-metadata-0" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.682422 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l4ctl\" (UniqueName: \"kubernetes.io/projected/e693ebe6-ec98-4906-9a85-25a5a8a3c871-kube-api-access-l4ctl\") pod \"nova-api-0\" (UID: \"e693ebe6-ec98-4906-9a85-25a5a8a3c871\") " pod="openstack/nova-api-0" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.682478 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e693ebe6-ec98-4906-9a85-25a5a8a3c871-public-tls-certs\") pod \"nova-api-0\" (UID: \"e693ebe6-ec98-4906-9a85-25a5a8a3c871\") " pod="openstack/nova-api-0" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.682547 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e693ebe6-ec98-4906-9a85-25a5a8a3c871-config-data\") pod \"nova-api-0\" (UID: \"e693ebe6-ec98-4906-9a85-25a5a8a3c871\") " pod="openstack/nova-api-0" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.682578 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e099c7b6-c61f-4426-a17a-ca13ca695a1e-logs\") pod \"nova-metadata-0\" (UID: \"e099c7b6-c61f-4426-a17a-ca13ca695a1e\") " pod="openstack/nova-metadata-0" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.682617 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/e099c7b6-c61f-4426-a17a-ca13ca695a1e-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"e099c7b6-c61f-4426-a17a-ca13ca695a1e\") " pod="openstack/nova-metadata-0" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.682654 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e099c7b6-c61f-4426-a17a-ca13ca695a1e-config-data\") pod \"nova-metadata-0\" (UID: \"e099c7b6-c61f-4426-a17a-ca13ca695a1e\") " pod="openstack/nova-metadata-0" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.682679 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e693ebe6-ec98-4906-9a85-25a5a8a3c871-internal-tls-certs\") pod \"nova-api-0\" (UID: \"e693ebe6-ec98-4906-9a85-25a5a8a3c871\") " pod="openstack/nova-api-0" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.682720 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e693ebe6-ec98-4906-9a85-25a5a8a3c871-logs\") pod \"nova-api-0\" (UID: \"e693ebe6-ec98-4906-9a85-25a5a8a3c871\") " pod="openstack/nova-api-0" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.682755 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e099c7b6-c61f-4426-a17a-ca13ca695a1e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e099c7b6-c61f-4426-a17a-ca13ca695a1e\") " pod="openstack/nova-metadata-0" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.682812 4784 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-b6btd\" (UniqueName: \"kubernetes.io/projected/e099c7b6-c61f-4426-a17a-ca13ca695a1e-kube-api-access-b6btd\") pod \"nova-metadata-0\" (UID: \"e099c7b6-c61f-4426-a17a-ca13ca695a1e\") " pod="openstack/nova-metadata-0"
Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.682832 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e693ebe6-ec98-4906-9a85-25a5a8a3c871-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e693ebe6-ec98-4906-9a85-25a5a8a3c871\") " pod="openstack/nova-api-0"
Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.683556 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e099c7b6-c61f-4426-a17a-ca13ca695a1e-logs\") pod \"nova-metadata-0\" (UID: \"e099c7b6-c61f-4426-a17a-ca13ca695a1e\") " pod="openstack/nova-metadata-0"
Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.683769 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e693ebe6-ec98-4906-9a85-25a5a8a3c871-logs\") pod \"nova-api-0\" (UID: \"e693ebe6-ec98-4906-9a85-25a5a8a3c871\") " pod="openstack/nova-api-0"
Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.688598 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/e099c7b6-c61f-4426-a17a-ca13ca695a1e-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"e099c7b6-c61f-4426-a17a-ca13ca695a1e\") " pod="openstack/nova-metadata-0"
Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.689261 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e693ebe6-ec98-4906-9a85-25a5a8a3c871-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e693ebe6-ec98-4906-9a85-25a5a8a3c871\") " pod="openstack/nova-api-0"
Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.691482 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e099c7b6-c61f-4426-a17a-ca13ca695a1e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e099c7b6-c61f-4426-a17a-ca13ca695a1e\") " pod="openstack/nova-metadata-0"
Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.691709 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e693ebe6-ec98-4906-9a85-25a5a8a3c871-internal-tls-certs\") pod \"nova-api-0\" (UID: \"e693ebe6-ec98-4906-9a85-25a5a8a3c871\") " pod="openstack/nova-api-0"
Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.692526 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e693ebe6-ec98-4906-9a85-25a5a8a3c871-config-data\") pod \"nova-api-0\" (UID: \"e693ebe6-ec98-4906-9a85-25a5a8a3c871\") " pod="openstack/nova-api-0"
Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.693369 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e099c7b6-c61f-4426-a17a-ca13ca695a1e-config-data\") pod \"nova-metadata-0\" (UID: \"e099c7b6-c61f-4426-a17a-ca13ca695a1e\") " pod="openstack/nova-metadata-0"
Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.694368 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e693ebe6-ec98-4906-9a85-25a5a8a3c871-public-tls-certs\") pod \"nova-api-0\" (UID: \"e693ebe6-ec98-4906-9a85-25a5a8a3c871\") " pod="openstack/nova-api-0"
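The VerifyControllerAttachedVolume / "MountVolume started" / "MountVolume.SetUp succeeded" triples above are the kubelet's volume manager reconciling its desired state (the volumes the replacement nova-api-0 and nova-metadata-0 pods need) against its actual state (what is currently mounted). A simplified sketch of that reconcile loop, using illustrative types rather than the real kubelet ones:

    package main

    import "fmt"

    // desiredVolume is an illustrative stand-in for one entry in the volume
    // manager's desired state of the world; mounted marks the actual state.
    type desiredVolume struct {
    	uniqueName string
    	pod        string
    	mounted    bool
    }

    // reconcile mounts whatever the desired state lists but the actual state
    // lacks, producing the started/succeeded pairs seen in the log above.
    func reconcile(volumes []*desiredVolume) {
    	for _, v := range volumes {
    		if v.mounted {
    			continue // already in actual state: nothing to do
    		}
    		fmt.Printf("MountVolume started for volume %q pod %q\n", v.uniqueName, v.pod)
    		// The real kubelet invokes the volume plugin's SetUp here; assume success.
    		v.mounted = true
    		fmt.Printf("MountVolume.SetUp succeeded for volume %q pod %q\n", v.uniqueName, v.pod)
    	}
    }

    func main() {
    	volumes := []*desiredVolume{
    		{uniqueName: "kubernetes.io/secret/config-data", pod: "nova-api-0"},
    		{uniqueName: "kubernetes.io/empty-dir/logs", pod: "nova-metadata-0"},
    	}
    	reconcile(volumes) // first pass mounts both volumes
    	reconcile(volumes) // second pass is a no-op: the loop is idempotent
    }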
\"kubernetes.io/secret/e693ebe6-ec98-4906-9a85-25a5a8a3c871-public-tls-certs\") pod \"nova-api-0\" (UID: \"e693ebe6-ec98-4906-9a85-25a5a8a3c871\") " pod="openstack/nova-api-0" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.702912 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b6btd\" (UniqueName: \"kubernetes.io/projected/e099c7b6-c61f-4426-a17a-ca13ca695a1e-kube-api-access-b6btd\") pod \"nova-metadata-0\" (UID: \"e099c7b6-c61f-4426-a17a-ca13ca695a1e\") " pod="openstack/nova-metadata-0" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.705787 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l4ctl\" (UniqueName: \"kubernetes.io/projected/e693ebe6-ec98-4906-9a85-25a5a8a3c871-kube-api-access-l4ctl\") pod \"nova-api-0\" (UID: \"e693ebe6-ec98-4906-9a85-25a5a8a3c871\") " pod="openstack/nova-api-0" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.840018 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 12:49:12 crc kubenswrapper[4784]: I1205 12:49:12.848728 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 12:49:13 crc kubenswrapper[4784]: I1205 12:49:13.031819 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ed8203f-6bb3-43d8-b034-e2ea5b285a87" path="/var/lib/kubelet/pods/2ed8203f-6bb3-43d8-b034-e2ea5b285a87/volumes" Dec 05 12:49:13 crc kubenswrapper[4784]: I1205 12:49:13.032485 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="97a264d6-629e-457a-8e0b-7d0ce2255a93" path="/var/lib/kubelet/pods/97a264d6-629e-457a-8e0b-7d0ce2255a93/volumes" Dec 05 12:49:13 crc kubenswrapper[4784]: E1205 12:49:13.318106 4784 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 03f79a3971dd550575340b736326264267f084d0c58634f07689b55e51e503bf is running failed: container process not found" containerID="03f79a3971dd550575340b736326264267f084d0c58634f07689b55e51e503bf" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 05 12:49:13 crc kubenswrapper[4784]: E1205 12:49:13.318779 4784 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 03f79a3971dd550575340b736326264267f084d0c58634f07689b55e51e503bf is running failed: container process not found" containerID="03f79a3971dd550575340b736326264267f084d0c58634f07689b55e51e503bf" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 05 12:49:13 crc kubenswrapper[4784]: E1205 12:49:13.319375 4784 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 03f79a3971dd550575340b736326264267f084d0c58634f07689b55e51e503bf is running failed: container process not found" containerID="03f79a3971dd550575340b736326264267f084d0c58634f07689b55e51e503bf" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 05 12:49:13 crc kubenswrapper[4784]: E1205 12:49:13.319414 4784 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 03f79a3971dd550575340b736326264267f084d0c58634f07689b55e51e503bf is running failed: container process not found" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="ff15091d-0644-490f-968b-bbbba4cd5d99" 
containerName="nova-scheduler-scheduler" Dec 05 12:49:13 crc kubenswrapper[4784]: I1205 12:49:13.385863 4784 generic.go:334] "Generic (PLEG): container finished" podID="ff15091d-0644-490f-968b-bbbba4cd5d99" containerID="03f79a3971dd550575340b736326264267f084d0c58634f07689b55e51e503bf" exitCode=0 Dec 05 12:49:13 crc kubenswrapper[4784]: I1205 12:49:13.385898 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"ff15091d-0644-490f-968b-bbbba4cd5d99","Type":"ContainerDied","Data":"03f79a3971dd550575340b736326264267f084d0c58634f07689b55e51e503bf"} Dec 05 12:49:14 crc kubenswrapper[4784]: I1205 12:49:14.084065 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 12:49:14 crc kubenswrapper[4784]: I1205 12:49:14.132408 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 12:49:14 crc kubenswrapper[4784]: I1205 12:49:14.279842 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 12:49:14 crc kubenswrapper[4784]: I1205 12:49:14.325354 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff15091d-0644-490f-968b-bbbba4cd5d99-config-data\") pod \"ff15091d-0644-490f-968b-bbbba4cd5d99\" (UID: \"ff15091d-0644-490f-968b-bbbba4cd5d99\") " Dec 05 12:49:14 crc kubenswrapper[4784]: I1205 12:49:14.325816 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff15091d-0644-490f-968b-bbbba4cd5d99-combined-ca-bundle\") pod \"ff15091d-0644-490f-968b-bbbba4cd5d99\" (UID: \"ff15091d-0644-490f-968b-bbbba4cd5d99\") " Dec 05 12:49:14 crc kubenswrapper[4784]: I1205 12:49:14.325929 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zll2f\" (UniqueName: \"kubernetes.io/projected/ff15091d-0644-490f-968b-bbbba4cd5d99-kube-api-access-zll2f\") pod \"ff15091d-0644-490f-968b-bbbba4cd5d99\" (UID: \"ff15091d-0644-490f-968b-bbbba4cd5d99\") " Dec 05 12:49:14 crc kubenswrapper[4784]: I1205 12:49:14.334384 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff15091d-0644-490f-968b-bbbba4cd5d99-kube-api-access-zll2f" (OuterVolumeSpecName: "kube-api-access-zll2f") pod "ff15091d-0644-490f-968b-bbbba4cd5d99" (UID: "ff15091d-0644-490f-968b-bbbba4cd5d99"). InnerVolumeSpecName "kube-api-access-zll2f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:49:14 crc kubenswrapper[4784]: I1205 12:49:14.378387 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff15091d-0644-490f-968b-bbbba4cd5d99-config-data" (OuterVolumeSpecName: "config-data") pod "ff15091d-0644-490f-968b-bbbba4cd5d99" (UID: "ff15091d-0644-490f-968b-bbbba4cd5d99"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:49:14 crc kubenswrapper[4784]: I1205 12:49:14.397561 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e099c7b6-c61f-4426-a17a-ca13ca695a1e","Type":"ContainerStarted","Data":"753b015a68248318ae4994d7bd8e149e0d057efab40a9d959792934c636e45a9"} Dec 05 12:49:14 crc kubenswrapper[4784]: I1205 12:49:14.399052 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e693ebe6-ec98-4906-9a85-25a5a8a3c871","Type":"ContainerStarted","Data":"42db00120f2fd8b045661e7cec51cec9980a26031245f0e21ae08f8561ed2c96"} Dec 05 12:49:14 crc kubenswrapper[4784]: I1205 12:49:14.401727 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"ff15091d-0644-490f-968b-bbbba4cd5d99","Type":"ContainerDied","Data":"b6bdec982730f5a1e683e9927843c4a9bdd4ad07c73cb5be955cc09da93f3f4d"} Dec 05 12:49:14 crc kubenswrapper[4784]: I1205 12:49:14.401780 4784 scope.go:117] "RemoveContainer" containerID="03f79a3971dd550575340b736326264267f084d0c58634f07689b55e51e503bf" Dec 05 12:49:14 crc kubenswrapper[4784]: I1205 12:49:14.402166 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 12:49:14 crc kubenswrapper[4784]: I1205 12:49:14.403278 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff15091d-0644-490f-968b-bbbba4cd5d99-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ff15091d-0644-490f-968b-bbbba4cd5d99" (UID: "ff15091d-0644-490f-968b-bbbba4cd5d99"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:49:14 crc kubenswrapper[4784]: I1205 12:49:14.429230 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff15091d-0644-490f-968b-bbbba4cd5d99-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:14 crc kubenswrapper[4784]: I1205 12:49:14.430300 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff15091d-0644-490f-968b-bbbba4cd5d99-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:14 crc kubenswrapper[4784]: I1205 12:49:14.430355 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zll2f\" (UniqueName: \"kubernetes.io/projected/ff15091d-0644-490f-968b-bbbba4cd5d99-kube-api-access-zll2f\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:14 crc kubenswrapper[4784]: I1205 12:49:14.741814 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 12:49:14 crc kubenswrapper[4784]: I1205 12:49:14.751739 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 12:49:14 crc kubenswrapper[4784]: I1205 12:49:14.760562 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 12:49:14 crc kubenswrapper[4784]: E1205 12:49:14.761126 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff15091d-0644-490f-968b-bbbba4cd5d99" containerName="nova-scheduler-scheduler" Dec 05 12:49:14 crc kubenswrapper[4784]: I1205 12:49:14.761148 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff15091d-0644-490f-968b-bbbba4cd5d99" containerName="nova-scheduler-scheduler" Dec 05 12:49:14 crc kubenswrapper[4784]: I1205 12:49:14.761396 4784 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="ff15091d-0644-490f-968b-bbbba4cd5d99" containerName="nova-scheduler-scheduler" Dec 05 12:49:14 crc kubenswrapper[4784]: I1205 12:49:14.762322 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 12:49:14 crc kubenswrapper[4784]: I1205 12:49:14.764742 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 05 12:49:14 crc kubenswrapper[4784]: I1205 12:49:14.770173 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 12:49:14 crc kubenswrapper[4784]: I1205 12:49:14.836709 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df7dbd5c-eb86-4431-8cdd-59b57dcfc381-config-data\") pod \"nova-scheduler-0\" (UID: \"df7dbd5c-eb86-4431-8cdd-59b57dcfc381\") " pod="openstack/nova-scheduler-0" Dec 05 12:49:14 crc kubenswrapper[4784]: I1205 12:49:14.837085 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df7dbd5c-eb86-4431-8cdd-59b57dcfc381-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"df7dbd5c-eb86-4431-8cdd-59b57dcfc381\") " pod="openstack/nova-scheduler-0" Dec 05 12:49:14 crc kubenswrapper[4784]: I1205 12:49:14.837168 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqdgp\" (UniqueName: \"kubernetes.io/projected/df7dbd5c-eb86-4431-8cdd-59b57dcfc381-kube-api-access-fqdgp\") pod \"nova-scheduler-0\" (UID: \"df7dbd5c-eb86-4431-8cdd-59b57dcfc381\") " pod="openstack/nova-scheduler-0" Dec 05 12:49:14 crc kubenswrapper[4784]: I1205 12:49:14.939323 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df7dbd5c-eb86-4431-8cdd-59b57dcfc381-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"df7dbd5c-eb86-4431-8cdd-59b57dcfc381\") " pod="openstack/nova-scheduler-0" Dec 05 12:49:14 crc kubenswrapper[4784]: I1205 12:49:14.939416 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqdgp\" (UniqueName: \"kubernetes.io/projected/df7dbd5c-eb86-4431-8cdd-59b57dcfc381-kube-api-access-fqdgp\") pod \"nova-scheduler-0\" (UID: \"df7dbd5c-eb86-4431-8cdd-59b57dcfc381\") " pod="openstack/nova-scheduler-0" Dec 05 12:49:14 crc kubenswrapper[4784]: I1205 12:49:14.939451 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df7dbd5c-eb86-4431-8cdd-59b57dcfc381-config-data\") pod \"nova-scheduler-0\" (UID: \"df7dbd5c-eb86-4431-8cdd-59b57dcfc381\") " pod="openstack/nova-scheduler-0" Dec 05 12:49:14 crc kubenswrapper[4784]: I1205 12:49:14.943003 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df7dbd5c-eb86-4431-8cdd-59b57dcfc381-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"df7dbd5c-eb86-4431-8cdd-59b57dcfc381\") " pod="openstack/nova-scheduler-0" Dec 05 12:49:14 crc kubenswrapper[4784]: I1205 12:49:14.944067 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df7dbd5c-eb86-4431-8cdd-59b57dcfc381-config-data\") pod \"nova-scheduler-0\" (UID: \"df7dbd5c-eb86-4431-8cdd-59b57dcfc381\") " 
pod="openstack/nova-scheduler-0" Dec 05 12:49:14 crc kubenswrapper[4784]: I1205 12:49:14.956749 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fqdgp\" (UniqueName: \"kubernetes.io/projected/df7dbd5c-eb86-4431-8cdd-59b57dcfc381-kube-api-access-fqdgp\") pod \"nova-scheduler-0\" (UID: \"df7dbd5c-eb86-4431-8cdd-59b57dcfc381\") " pod="openstack/nova-scheduler-0" Dec 05 12:49:15 crc kubenswrapper[4784]: I1205 12:49:15.011406 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff15091d-0644-490f-968b-bbbba4cd5d99" path="/var/lib/kubelet/pods/ff15091d-0644-490f-968b-bbbba4cd5d99/volumes" Dec 05 12:49:15 crc kubenswrapper[4784]: I1205 12:49:15.093499 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 12:49:15 crc kubenswrapper[4784]: I1205 12:49:15.414710 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e099c7b6-c61f-4426-a17a-ca13ca695a1e","Type":"ContainerStarted","Data":"79547dab793161ab8644c5b5938a1d8e1d0a9b1bcb2de9d2f177df345321d5dd"} Dec 05 12:49:15 crc kubenswrapper[4784]: I1205 12:49:15.414758 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e099c7b6-c61f-4426-a17a-ca13ca695a1e","Type":"ContainerStarted","Data":"94dba20ce55f0e3dc8daef2b1ca0377dd4ffc512fe6d035cdb53bdab51462d87"} Dec 05 12:49:15 crc kubenswrapper[4784]: I1205 12:49:15.418562 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e693ebe6-ec98-4906-9a85-25a5a8a3c871","Type":"ContainerStarted","Data":"82476507e507d8fb401bd2858e7decb38ecfb2d6ab6da7c5ab87f0284cbaf944"} Dec 05 12:49:15 crc kubenswrapper[4784]: I1205 12:49:15.418613 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e693ebe6-ec98-4906-9a85-25a5a8a3c871","Type":"ContainerStarted","Data":"e2597805c846c7b9a74e63ee2cd88b18e9a24c0ffe0577b7552c1c20d766cacd"} Dec 05 12:49:15 crc kubenswrapper[4784]: I1205 12:49:15.442424 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.442402891 podStartE2EDuration="3.442402891s" podCreationTimestamp="2025-12-05 12:49:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:49:15.431157769 +0000 UTC m=+1434.851224614" watchObservedRunningTime="2025-12-05 12:49:15.442402891 +0000 UTC m=+1434.862469716" Dec 05 12:49:15 crc kubenswrapper[4784]: I1205 12:49:15.466017 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.465994478 podStartE2EDuration="3.465994478s" podCreationTimestamp="2025-12-05 12:49:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:49:15.453797617 +0000 UTC m=+1434.873864462" watchObservedRunningTime="2025-12-05 12:49:15.465994478 +0000 UTC m=+1434.886061303" Dec 05 12:49:15 crc kubenswrapper[4784]: I1205 12:49:15.594973 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 12:49:16 crc kubenswrapper[4784]: I1205 12:49:16.451284 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" 
event={"ID":"df7dbd5c-eb86-4431-8cdd-59b57dcfc381","Type":"ContainerStarted","Data":"802de1f2d36ff02b3e8ae6796309688b01696a0f889795e8ea63f70796facbef"} Dec 05 12:49:16 crc kubenswrapper[4784]: I1205 12:49:16.451788 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"df7dbd5c-eb86-4431-8cdd-59b57dcfc381","Type":"ContainerStarted","Data":"5f55c0a7bbe1faf31bde6235933d02bde20fca6a9964e985eb86cf0f33dcdb09"} Dec 05 12:49:16 crc kubenswrapper[4784]: I1205 12:49:16.478740 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.478712364 podStartE2EDuration="2.478712364s" podCreationTimestamp="2025-12-05 12:49:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:49:16.473854123 +0000 UTC m=+1435.893921018" watchObservedRunningTime="2025-12-05 12:49:16.478712364 +0000 UTC m=+1435.898779219" Dec 05 12:49:17 crc kubenswrapper[4784]: I1205 12:49:17.840667 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 12:49:17 crc kubenswrapper[4784]: I1205 12:49:17.840920 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 12:49:20 crc kubenswrapper[4784]: I1205 12:49:20.094027 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 05 12:49:22 crc kubenswrapper[4784]: I1205 12:49:22.840699 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 12:49:22 crc kubenswrapper[4784]: I1205 12:49:22.841988 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 12:49:22 crc kubenswrapper[4784]: I1205 12:49:22.850003 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 12:49:22 crc kubenswrapper[4784]: I1205 12:49:22.850092 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 12:49:23 crc kubenswrapper[4784]: I1205 12:49:23.889428 4784 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="e099c7b6-c61f-4426-a17a-ca13ca695a1e" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.226:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 12:49:23 crc kubenswrapper[4784]: I1205 12:49:23.889541 4784 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="e099c7b6-c61f-4426-a17a-ca13ca695a1e" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.226:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 12:49:23 crc kubenswrapper[4784]: I1205 12:49:23.903486 4784 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="e693ebe6-ec98-4906-9a85-25a5a8a3c871" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.227:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 12:49:23 crc kubenswrapper[4784]: I1205 12:49:23.903897 4784 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="e693ebe6-ec98-4906-9a85-25a5a8a3c871" containerName="nova-api-api" probeResult="failure" output="Get 
\"https://10.217.0.227:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 12:49:25 crc kubenswrapper[4784]: I1205 12:49:25.094171 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 05 12:49:25 crc kubenswrapper[4784]: I1205 12:49:25.129595 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 05 12:49:25 crc kubenswrapper[4784]: I1205 12:49:25.588045 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 05 12:49:27 crc kubenswrapper[4784]: I1205 12:49:27.807092 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 05 12:49:32 crc kubenswrapper[4784]: I1205 12:49:32.846143 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 05 12:49:32 crc kubenswrapper[4784]: I1205 12:49:32.847344 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 05 12:49:32 crc kubenswrapper[4784]: I1205 12:49:32.852285 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 05 12:49:32 crc kubenswrapper[4784]: I1205 12:49:32.856895 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 05 12:49:32 crc kubenswrapper[4784]: I1205 12:49:32.857624 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 05 12:49:32 crc kubenswrapper[4784]: I1205 12:49:32.857818 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 05 12:49:32 crc kubenswrapper[4784]: I1205 12:49:32.876019 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 05 12:49:33 crc kubenswrapper[4784]: I1205 12:49:33.661867 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 05 12:49:33 crc kubenswrapper[4784]: I1205 12:49:33.669999 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 05 12:49:33 crc kubenswrapper[4784]: I1205 12:49:33.671862 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 05 12:49:42 crc kubenswrapper[4784]: I1205 12:49:42.105262 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 12:49:42 crc kubenswrapper[4784]: I1205 12:49:42.920870 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 12:49:45 crc kubenswrapper[4784]: I1205 12:49:45.431461 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b" containerName="rabbitmq" containerID="cri-o://37011c59b3fd9ddce0a34429d3b2358d1770fa3a06b3677f682fba41637f0a5b" gracePeriod=604797 Dec 05 12:49:46 crc kubenswrapper[4784]: I1205 12:49:46.025060 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="e9889e9e-8ec4-44aa-a829-327920ab827f" containerName="rabbitmq" containerID="cri-o://d02604ada5f4a9b3e1740a49a9fb699d416f5d768de2e3c0a242995b1dcc93dc" gracePeriod=604797 Dec 05 12:49:46 crc kubenswrapper[4784]: I1205 12:49:46.800996 4784 generic.go:334] "Generic (PLEG): 
container finished" podID="b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b" containerID="37011c59b3fd9ddce0a34429d3b2358d1770fa3a06b3677f682fba41637f0a5b" exitCode=0 Dec 05 12:49:46 crc kubenswrapper[4784]: I1205 12:49:46.801393 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b","Type":"ContainerDied","Data":"37011c59b3fd9ddce0a34429d3b2358d1770fa3a06b3677f682fba41637f0a5b"} Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.073453 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.208301 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-rabbitmq-plugins\") pod \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\" (UID: \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\") " Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.208420 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-erlang-cookie-secret\") pod \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\" (UID: \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\") " Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.208490 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-server-conf\") pod \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\" (UID: \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\") " Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.208604 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dvp96\" (UniqueName: \"kubernetes.io/projected/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-kube-api-access-dvp96\") pod \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\" (UID: \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\") " Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.208625 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-rabbitmq-tls\") pod \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\" (UID: \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\") " Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.208670 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-plugins-conf\") pod \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\" (UID: \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\") " Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.208710 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-rabbitmq-confd\") pod \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\" (UID: \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\") " Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.208735 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-rabbitmq-erlang-cookie\") pod \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\" (UID: \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\") " Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 
12:49:47.208773 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-config-data\") pod \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\" (UID: \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\") " Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.208805 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-pod-info\") pod \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\" (UID: \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\") " Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.208820 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\" (UID: \"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b\") " Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.209315 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b" (UID: "b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.210011 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b" (UID: "b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.210070 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b" (UID: "b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.217803 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b" (UID: "b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.218803 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-pod-info" (OuterVolumeSpecName: "pod-info") pod "b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b" (UID: "b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.222993 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "persistence") pod "b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b" (UID: "b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b"). 
InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.227459 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-kube-api-access-dvp96" (OuterVolumeSpecName: "kube-api-access-dvp96") pod "b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b" (UID: "b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b"). InnerVolumeSpecName "kube-api-access-dvp96". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.257334 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-config-data" (OuterVolumeSpecName: "config-data") pod "b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b" (UID: "b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.272612 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b" (UID: "b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.311304 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dvp96\" (UniqueName: \"kubernetes.io/projected/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-kube-api-access-dvp96\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.311339 4784 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.311351 4784 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.311364 4784 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.311375 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.311386 4784 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-pod-info\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.311415 4784 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.311428 4784 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-rabbitmq-plugins\") on node 
\"crc\" DevicePath \"\"" Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.311439 4784 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.314305 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-server-conf" (OuterVolumeSpecName: "server-conf") pod "b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b" (UID: "b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.337320 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b" (UID: "b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.338356 4784 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.413270 4784 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.413303 4784 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-server-conf\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.413315 4784 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.655891 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.717099 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e9889e9e-8ec4-44aa-a829-327920ab827f-server-conf\") pod \"e9889e9e-8ec4-44aa-a829-327920ab827f\" (UID: \"e9889e9e-8ec4-44aa-a829-327920ab827f\") " Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.717227 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e9889e9e-8ec4-44aa-a829-327920ab827f-rabbitmq-erlang-cookie\") pod \"e9889e9e-8ec4-44aa-a829-327920ab827f\" (UID: \"e9889e9e-8ec4-44aa-a829-327920ab827f\") " Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.717312 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e9889e9e-8ec4-44aa-a829-327920ab827f-plugins-conf\") pod \"e9889e9e-8ec4-44aa-a829-327920ab827f\" (UID: \"e9889e9e-8ec4-44aa-a829-327920ab827f\") " Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.717384 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"e9889e9e-8ec4-44aa-a829-327920ab827f\" (UID: \"e9889e9e-8ec4-44aa-a829-327920ab827f\") " Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.717468 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cv5vh\" (UniqueName: \"kubernetes.io/projected/e9889e9e-8ec4-44aa-a829-327920ab827f-kube-api-access-cv5vh\") pod \"e9889e9e-8ec4-44aa-a829-327920ab827f\" (UID: \"e9889e9e-8ec4-44aa-a829-327920ab827f\") " Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.717504 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e9889e9e-8ec4-44aa-a829-327920ab827f-config-data\") pod \"e9889e9e-8ec4-44aa-a829-327920ab827f\" (UID: \"e9889e9e-8ec4-44aa-a829-327920ab827f\") " Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.717528 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e9889e9e-8ec4-44aa-a829-327920ab827f-rabbitmq-tls\") pod \"e9889e9e-8ec4-44aa-a829-327920ab827f\" (UID: \"e9889e9e-8ec4-44aa-a829-327920ab827f\") " Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.717557 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e9889e9e-8ec4-44aa-a829-327920ab827f-rabbitmq-confd\") pod \"e9889e9e-8ec4-44aa-a829-327920ab827f\" (UID: \"e9889e9e-8ec4-44aa-a829-327920ab827f\") " Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.717594 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e9889e9e-8ec4-44aa-a829-327920ab827f-erlang-cookie-secret\") pod \"e9889e9e-8ec4-44aa-a829-327920ab827f\" (UID: \"e9889e9e-8ec4-44aa-a829-327920ab827f\") " Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.717625 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e9889e9e-8ec4-44aa-a829-327920ab827f-pod-info\") pod \"e9889e9e-8ec4-44aa-a829-327920ab827f\" (UID: 
\"e9889e9e-8ec4-44aa-a829-327920ab827f\") " Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.717655 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e9889e9e-8ec4-44aa-a829-327920ab827f-rabbitmq-plugins\") pod \"e9889e9e-8ec4-44aa-a829-327920ab827f\" (UID: \"e9889e9e-8ec4-44aa-a829-327920ab827f\") " Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.718511 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e9889e9e-8ec4-44aa-a829-327920ab827f-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "e9889e9e-8ec4-44aa-a829-327920ab827f" (UID: "e9889e9e-8ec4-44aa-a829-327920ab827f"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.728013 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e9889e9e-8ec4-44aa-a829-327920ab827f-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "e9889e9e-8ec4-44aa-a829-327920ab827f" (UID: "e9889e9e-8ec4-44aa-a829-327920ab827f"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.742568 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9889e9e-8ec4-44aa-a829-327920ab827f-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "e9889e9e-8ec4-44aa-a829-327920ab827f" (UID: "e9889e9e-8ec4-44aa-a829-327920ab827f"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.771840 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "persistence") pod "e9889e9e-8ec4-44aa-a829-327920ab827f" (UID: "e9889e9e-8ec4-44aa-a829-327920ab827f"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.784832 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/e9889e9e-8ec4-44aa-a829-327920ab827f-pod-info" (OuterVolumeSpecName: "pod-info") pod "e9889e9e-8ec4-44aa-a829-327920ab827f" (UID: "e9889e9e-8ec4-44aa-a829-327920ab827f"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.792650 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9889e9e-8ec4-44aa-a829-327920ab827f-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "e9889e9e-8ec4-44aa-a829-327920ab827f" (UID: "e9889e9e-8ec4-44aa-a829-327920ab827f"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.793544 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9889e9e-8ec4-44aa-a829-327920ab827f-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "e9889e9e-8ec4-44aa-a829-327920ab827f" (UID: "e9889e9e-8ec4-44aa-a829-327920ab827f"). InnerVolumeSpecName "rabbitmq-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.805796 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9889e9e-8ec4-44aa-a829-327920ab827f-kube-api-access-cv5vh" (OuterVolumeSpecName: "kube-api-access-cv5vh") pod "e9889e9e-8ec4-44aa-a829-327920ab827f" (UID: "e9889e9e-8ec4-44aa-a829-327920ab827f"). InnerVolumeSpecName "kube-api-access-cv5vh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.820489 4784 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.820526 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cv5vh\" (UniqueName: \"kubernetes.io/projected/e9889e9e-8ec4-44aa-a829-327920ab827f-kube-api-access-cv5vh\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.820537 4784 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e9889e9e-8ec4-44aa-a829-327920ab827f-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.820546 4784 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e9889e9e-8ec4-44aa-a829-327920ab827f-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.820555 4784 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e9889e9e-8ec4-44aa-a829-327920ab827f-pod-info\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.820563 4784 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e9889e9e-8ec4-44aa-a829-327920ab827f-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.820573 4784 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e9889e9e-8ec4-44aa-a829-327920ab827f-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.820581 4784 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e9889e9e-8ec4-44aa-a829-327920ab827f-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.894701 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9889e9e-8ec4-44aa-a829-327920ab827f-server-conf" (OuterVolumeSpecName: "server-conf") pod "e9889e9e-8ec4-44aa-a829-327920ab827f" (UID: "e9889e9e-8ec4-44aa-a829-327920ab827f"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.923526 4784 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e9889e9e-8ec4-44aa-a829-327920ab827f-server-conf\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.940157 4784 generic.go:334] "Generic (PLEG): container finished" podID="e9889e9e-8ec4-44aa-a829-327920ab827f" containerID="d02604ada5f4a9b3e1740a49a9fb699d416f5d768de2e3c0a242995b1dcc93dc" exitCode=0 Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.940398 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.941489 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e9889e9e-8ec4-44aa-a829-327920ab827f","Type":"ContainerDied","Data":"d02604ada5f4a9b3e1740a49a9fb699d416f5d768de2e3c0a242995b1dcc93dc"} Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.941525 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e9889e9e-8ec4-44aa-a829-327920ab827f","Type":"ContainerDied","Data":"49af8e3875e1ff9d2041e2c3c8c3c2983967017b0e17cf9854be4f9767d78587"} Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.941567 4784 scope.go:117] "RemoveContainer" containerID="d02604ada5f4a9b3e1740a49a9fb699d416f5d768de2e3c0a242995b1dcc93dc" Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.966313 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b","Type":"ContainerDied","Data":"f855e2c926d456e2afb4acedb0b5783ce2a787ab6a2f06dcbcb51b447ecf68e6"} Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.966414 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.968133 4784 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Dec 05 12:49:47 crc kubenswrapper[4784]: I1205 12:49:47.981817 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9889e9e-8ec4-44aa-a829-327920ab827f-config-data" (OuterVolumeSpecName: "config-data") pod "e9889e9e-8ec4-44aa-a829-327920ab827f" (UID: "e9889e9e-8ec4-44aa-a829-327920ab827f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.002176 4784 scope.go:117] "RemoveContainer" containerID="ae813c619b4059c1fac1dd894044aaadfb3d9881e07c852a6891523597c9bbce" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.026862 4784 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.026893 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e9889e9e-8ec4-44aa-a829-327920ab827f-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.056869 4784 scope.go:117] "RemoveContainer" containerID="d02604ada5f4a9b3e1740a49a9fb699d416f5d768de2e3c0a242995b1dcc93dc" Dec 05 12:49:48 crc kubenswrapper[4784]: E1205 12:49:48.057688 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d02604ada5f4a9b3e1740a49a9fb699d416f5d768de2e3c0a242995b1dcc93dc\": container with ID starting with d02604ada5f4a9b3e1740a49a9fb699d416f5d768de2e3c0a242995b1dcc93dc not found: ID does not exist" containerID="d02604ada5f4a9b3e1740a49a9fb699d416f5d768de2e3c0a242995b1dcc93dc" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.057729 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d02604ada5f4a9b3e1740a49a9fb699d416f5d768de2e3c0a242995b1dcc93dc"} err="failed to get container status \"d02604ada5f4a9b3e1740a49a9fb699d416f5d768de2e3c0a242995b1dcc93dc\": rpc error: code = NotFound desc = could not find container \"d02604ada5f4a9b3e1740a49a9fb699d416f5d768de2e3c0a242995b1dcc93dc\": container with ID starting with d02604ada5f4a9b3e1740a49a9fb699d416f5d768de2e3c0a242995b1dcc93dc not found: ID does not exist" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.057755 4784 scope.go:117] "RemoveContainer" containerID="ae813c619b4059c1fac1dd894044aaadfb3d9881e07c852a6891523597c9bbce" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.057857 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9889e9e-8ec4-44aa-a829-327920ab827f-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "e9889e9e-8ec4-44aa-a829-327920ab827f" (UID: "e9889e9e-8ec4-44aa-a829-327920ab827f"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:49:48 crc kubenswrapper[4784]: E1205 12:49:48.058257 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae813c619b4059c1fac1dd894044aaadfb3d9881e07c852a6891523597c9bbce\": container with ID starting with ae813c619b4059c1fac1dd894044aaadfb3d9881e07c852a6891523597c9bbce not found: ID does not exist" containerID="ae813c619b4059c1fac1dd894044aaadfb3d9881e07c852a6891523597c9bbce" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.058278 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae813c619b4059c1fac1dd894044aaadfb3d9881e07c852a6891523597c9bbce"} err="failed to get container status \"ae813c619b4059c1fac1dd894044aaadfb3d9881e07c852a6891523597c9bbce\": rpc error: code = NotFound desc = could not find container \"ae813c619b4059c1fac1dd894044aaadfb3d9881e07c852a6891523597c9bbce\": container with ID starting with ae813c619b4059c1fac1dd894044aaadfb3d9881e07c852a6891523597c9bbce not found: ID does not exist" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.058291 4784 scope.go:117] "RemoveContainer" containerID="37011c59b3fd9ddce0a34429d3b2358d1770fa3a06b3677f682fba41637f0a5b" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.062741 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.073100 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.082294 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 12:49:48 crc kubenswrapper[4784]: E1205 12:49:48.085946 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9889e9e-8ec4-44aa-a829-327920ab827f" containerName="rabbitmq" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.085983 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9889e9e-8ec4-44aa-a829-327920ab827f" containerName="rabbitmq" Dec 05 12:49:48 crc kubenswrapper[4784]: E1205 12:49:48.086018 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b" containerName="setup-container" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.086027 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b" containerName="setup-container" Dec 05 12:49:48 crc kubenswrapper[4784]: E1205 12:49:48.086035 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b" containerName="rabbitmq" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.086041 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b" containerName="rabbitmq" Dec 05 12:49:48 crc kubenswrapper[4784]: E1205 12:49:48.086066 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9889e9e-8ec4-44aa-a829-327920ab827f" containerName="setup-container" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.086073 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9889e9e-8ec4-44aa-a829-327920ab827f" containerName="setup-container" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.086336 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b" containerName="rabbitmq" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 
12:49:48.086372 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9889e9e-8ec4-44aa-a829-327920ab827f" containerName="rabbitmq" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.097711 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.100083 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.101095 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.101301 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.102207 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.102588 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-zlsh7" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.103696 4784 scope.go:117] "RemoveContainer" containerID="22363b649441c80e6ba0100af0b361bd0e296e9bee6f6dfe1d164d8d100e3153" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.103874 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.104053 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.113241 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.128499 4784 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e9889e9e-8ec4-44aa-a829-327920ab827f-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.230931 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1a3bb70f-0aad-4f14-809e-1f39b78c97b8-pod-info\") pod \"rabbitmq-server-0\" (UID: \"1a3bb70f-0aad-4f14-809e-1f39b78c97b8\") " pod="openstack/rabbitmq-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.230994 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1a3bb70f-0aad-4f14-809e-1f39b78c97b8-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"1a3bb70f-0aad-4f14-809e-1f39b78c97b8\") " pod="openstack/rabbitmq-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.231033 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"1a3bb70f-0aad-4f14-809e-1f39b78c97b8\") " pod="openstack/rabbitmq-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.231054 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1a3bb70f-0aad-4f14-809e-1f39b78c97b8-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: 
\"1a3bb70f-0aad-4f14-809e-1f39b78c97b8\") " pod="openstack/rabbitmq-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.231281 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1a3bb70f-0aad-4f14-809e-1f39b78c97b8-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"1a3bb70f-0aad-4f14-809e-1f39b78c97b8\") " pod="openstack/rabbitmq-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.231347 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1a3bb70f-0aad-4f14-809e-1f39b78c97b8-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"1a3bb70f-0aad-4f14-809e-1f39b78c97b8\") " pod="openstack/rabbitmq-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.231369 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1a3bb70f-0aad-4f14-809e-1f39b78c97b8-config-data\") pod \"rabbitmq-server-0\" (UID: \"1a3bb70f-0aad-4f14-809e-1f39b78c97b8\") " pod="openstack/rabbitmq-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.231424 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1a3bb70f-0aad-4f14-809e-1f39b78c97b8-server-conf\") pod \"rabbitmq-server-0\" (UID: \"1a3bb70f-0aad-4f14-809e-1f39b78c97b8\") " pod="openstack/rabbitmq-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.231678 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r52j4\" (UniqueName: \"kubernetes.io/projected/1a3bb70f-0aad-4f14-809e-1f39b78c97b8-kube-api-access-r52j4\") pod \"rabbitmq-server-0\" (UID: \"1a3bb70f-0aad-4f14-809e-1f39b78c97b8\") " pod="openstack/rabbitmq-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.231729 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1a3bb70f-0aad-4f14-809e-1f39b78c97b8-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"1a3bb70f-0aad-4f14-809e-1f39b78c97b8\") " pod="openstack/rabbitmq-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.231783 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1a3bb70f-0aad-4f14-809e-1f39b78c97b8-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"1a3bb70f-0aad-4f14-809e-1f39b78c97b8\") " pod="openstack/rabbitmq-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.279233 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.297424 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.312294 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.314071 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.316169 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.316370 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.316483 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.317024 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.317351 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.317559 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-2h67b" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.317691 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.326331 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.334949 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1a3bb70f-0aad-4f14-809e-1f39b78c97b8-server-conf\") pod \"rabbitmq-server-0\" (UID: \"1a3bb70f-0aad-4f14-809e-1f39b78c97b8\") " pod="openstack/rabbitmq-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.335073 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r52j4\" (UniqueName: \"kubernetes.io/projected/1a3bb70f-0aad-4f14-809e-1f39b78c97b8-kube-api-access-r52j4\") pod \"rabbitmq-server-0\" (UID: \"1a3bb70f-0aad-4f14-809e-1f39b78c97b8\") " pod="openstack/rabbitmq-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.335104 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1a3bb70f-0aad-4f14-809e-1f39b78c97b8-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"1a3bb70f-0aad-4f14-809e-1f39b78c97b8\") " pod="openstack/rabbitmq-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.335141 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1a3bb70f-0aad-4f14-809e-1f39b78c97b8-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"1a3bb70f-0aad-4f14-809e-1f39b78c97b8\") " pod="openstack/rabbitmq-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.335228 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1a3bb70f-0aad-4f14-809e-1f39b78c97b8-pod-info\") pod \"rabbitmq-server-0\" (UID: \"1a3bb70f-0aad-4f14-809e-1f39b78c97b8\") " pod="openstack/rabbitmq-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.335256 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1a3bb70f-0aad-4f14-809e-1f39b78c97b8-rabbitmq-tls\") pod 
\"rabbitmq-server-0\" (UID: \"1a3bb70f-0aad-4f14-809e-1f39b78c97b8\") " pod="openstack/rabbitmq-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.335294 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"1a3bb70f-0aad-4f14-809e-1f39b78c97b8\") " pod="openstack/rabbitmq-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.335317 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1a3bb70f-0aad-4f14-809e-1f39b78c97b8-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"1a3bb70f-0aad-4f14-809e-1f39b78c97b8\") " pod="openstack/rabbitmq-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.335378 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1a3bb70f-0aad-4f14-809e-1f39b78c97b8-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"1a3bb70f-0aad-4f14-809e-1f39b78c97b8\") " pod="openstack/rabbitmq-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.335406 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1a3bb70f-0aad-4f14-809e-1f39b78c97b8-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"1a3bb70f-0aad-4f14-809e-1f39b78c97b8\") " pod="openstack/rabbitmq-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.335427 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1a3bb70f-0aad-4f14-809e-1f39b78c97b8-config-data\") pod \"rabbitmq-server-0\" (UID: \"1a3bb70f-0aad-4f14-809e-1f39b78c97b8\") " pod="openstack/rabbitmq-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.336516 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1a3bb70f-0aad-4f14-809e-1f39b78c97b8-config-data\") pod \"rabbitmq-server-0\" (UID: \"1a3bb70f-0aad-4f14-809e-1f39b78c97b8\") " pod="openstack/rabbitmq-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.338075 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1a3bb70f-0aad-4f14-809e-1f39b78c97b8-server-conf\") pod \"rabbitmq-server-0\" (UID: \"1a3bb70f-0aad-4f14-809e-1f39b78c97b8\") " pod="openstack/rabbitmq-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.338853 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1a3bb70f-0aad-4f14-809e-1f39b78c97b8-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"1a3bb70f-0aad-4f14-809e-1f39b78c97b8\") " pod="openstack/rabbitmq-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.339506 4784 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"1a3bb70f-0aad-4f14-809e-1f39b78c97b8\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/rabbitmq-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.341828 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: 
\"kubernetes.io/configmap/1a3bb70f-0aad-4f14-809e-1f39b78c97b8-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"1a3bb70f-0aad-4f14-809e-1f39b78c97b8\") " pod="openstack/rabbitmq-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.342658 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1a3bb70f-0aad-4f14-809e-1f39b78c97b8-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"1a3bb70f-0aad-4f14-809e-1f39b78c97b8\") " pod="openstack/rabbitmq-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.346624 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1a3bb70f-0aad-4f14-809e-1f39b78c97b8-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"1a3bb70f-0aad-4f14-809e-1f39b78c97b8\") " pod="openstack/rabbitmq-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.348327 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1a3bb70f-0aad-4f14-809e-1f39b78c97b8-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"1a3bb70f-0aad-4f14-809e-1f39b78c97b8\") " pod="openstack/rabbitmq-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.348700 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1a3bb70f-0aad-4f14-809e-1f39b78c97b8-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"1a3bb70f-0aad-4f14-809e-1f39b78c97b8\") " pod="openstack/rabbitmq-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.350647 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1a3bb70f-0aad-4f14-809e-1f39b78c97b8-pod-info\") pod \"rabbitmq-server-0\" (UID: \"1a3bb70f-0aad-4f14-809e-1f39b78c97b8\") " pod="openstack/rabbitmq-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.363773 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r52j4\" (UniqueName: \"kubernetes.io/projected/1a3bb70f-0aad-4f14-809e-1f39b78c97b8-kube-api-access-r52j4\") pod \"rabbitmq-server-0\" (UID: \"1a3bb70f-0aad-4f14-809e-1f39b78c97b8\") " pod="openstack/rabbitmq-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.385343 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"1a3bb70f-0aad-4f14-809e-1f39b78c97b8\") " pod="openstack/rabbitmq-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.423956 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.437136 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/94f3bf83-4b17-4dbc-aed9-b0541983c0b8-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"94f3bf83-4b17-4dbc-aed9-b0541983c0b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.437261 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/94f3bf83-4b17-4dbc-aed9-b0541983c0b8-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"94f3bf83-4b17-4dbc-aed9-b0541983c0b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.437329 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"94f3bf83-4b17-4dbc-aed9-b0541983c0b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.437409 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/94f3bf83-4b17-4dbc-aed9-b0541983c0b8-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"94f3bf83-4b17-4dbc-aed9-b0541983c0b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.437444 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/94f3bf83-4b17-4dbc-aed9-b0541983c0b8-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"94f3bf83-4b17-4dbc-aed9-b0541983c0b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.437494 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/94f3bf83-4b17-4dbc-aed9-b0541983c0b8-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"94f3bf83-4b17-4dbc-aed9-b0541983c0b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.437677 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/94f3bf83-4b17-4dbc-aed9-b0541983c0b8-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"94f3bf83-4b17-4dbc-aed9-b0541983c0b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.437824 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/94f3bf83-4b17-4dbc-aed9-b0541983c0b8-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"94f3bf83-4b17-4dbc-aed9-b0541983c0b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.437875 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pwdsq\" (UniqueName: \"kubernetes.io/projected/94f3bf83-4b17-4dbc-aed9-b0541983c0b8-kube-api-access-pwdsq\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"94f3bf83-4b17-4dbc-aed9-b0541983c0b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.437960 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/94f3bf83-4b17-4dbc-aed9-b0541983c0b8-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"94f3bf83-4b17-4dbc-aed9-b0541983c0b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.437992 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/94f3bf83-4b17-4dbc-aed9-b0541983c0b8-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"94f3bf83-4b17-4dbc-aed9-b0541983c0b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.540525 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/94f3bf83-4b17-4dbc-aed9-b0541983c0b8-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"94f3bf83-4b17-4dbc-aed9-b0541983c0b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.540896 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/94f3bf83-4b17-4dbc-aed9-b0541983c0b8-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"94f3bf83-4b17-4dbc-aed9-b0541983c0b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.540931 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pwdsq\" (UniqueName: \"kubernetes.io/projected/94f3bf83-4b17-4dbc-aed9-b0541983c0b8-kube-api-access-pwdsq\") pod \"rabbitmq-cell1-server-0\" (UID: \"94f3bf83-4b17-4dbc-aed9-b0541983c0b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.540977 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/94f3bf83-4b17-4dbc-aed9-b0541983c0b8-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"94f3bf83-4b17-4dbc-aed9-b0541983c0b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.541007 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/94f3bf83-4b17-4dbc-aed9-b0541983c0b8-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"94f3bf83-4b17-4dbc-aed9-b0541983c0b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.541048 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/94f3bf83-4b17-4dbc-aed9-b0541983c0b8-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"94f3bf83-4b17-4dbc-aed9-b0541983c0b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.541081 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/94f3bf83-4b17-4dbc-aed9-b0541983c0b8-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"94f3bf83-4b17-4dbc-aed9-b0541983c0b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:49:48 crc 
kubenswrapper[4784]: I1205 12:49:48.541115 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"94f3bf83-4b17-4dbc-aed9-b0541983c0b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.541183 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/94f3bf83-4b17-4dbc-aed9-b0541983c0b8-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"94f3bf83-4b17-4dbc-aed9-b0541983c0b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.541331 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/94f3bf83-4b17-4dbc-aed9-b0541983c0b8-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"94f3bf83-4b17-4dbc-aed9-b0541983c0b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.541373 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/94f3bf83-4b17-4dbc-aed9-b0541983c0b8-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"94f3bf83-4b17-4dbc-aed9-b0541983c0b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.542490 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/94f3bf83-4b17-4dbc-aed9-b0541983c0b8-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"94f3bf83-4b17-4dbc-aed9-b0541983c0b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.542694 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/94f3bf83-4b17-4dbc-aed9-b0541983c0b8-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"94f3bf83-4b17-4dbc-aed9-b0541983c0b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.543097 4784 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"94f3bf83-4b17-4dbc-aed9-b0541983c0b8\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.543164 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/94f3bf83-4b17-4dbc-aed9-b0541983c0b8-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"94f3bf83-4b17-4dbc-aed9-b0541983c0b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.543585 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/94f3bf83-4b17-4dbc-aed9-b0541983c0b8-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"94f3bf83-4b17-4dbc-aed9-b0541983c0b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.543849 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: 
\"kubernetes.io/empty-dir/94f3bf83-4b17-4dbc-aed9-b0541983c0b8-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"94f3bf83-4b17-4dbc-aed9-b0541983c0b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.545358 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/94f3bf83-4b17-4dbc-aed9-b0541983c0b8-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"94f3bf83-4b17-4dbc-aed9-b0541983c0b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.549870 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/94f3bf83-4b17-4dbc-aed9-b0541983c0b8-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"94f3bf83-4b17-4dbc-aed9-b0541983c0b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.551167 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/94f3bf83-4b17-4dbc-aed9-b0541983c0b8-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"94f3bf83-4b17-4dbc-aed9-b0541983c0b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.558579 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/94f3bf83-4b17-4dbc-aed9-b0541983c0b8-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"94f3bf83-4b17-4dbc-aed9-b0541983c0b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.568449 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pwdsq\" (UniqueName: \"kubernetes.io/projected/94f3bf83-4b17-4dbc-aed9-b0541983c0b8-kube-api-access-pwdsq\") pod \"rabbitmq-cell1-server-0\" (UID: \"94f3bf83-4b17-4dbc-aed9-b0541983c0b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.600106 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"94f3bf83-4b17-4dbc-aed9-b0541983c0b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.633634 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.887694 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 12:49:48 crc kubenswrapper[4784]: I1205 12:49:48.984683 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"1a3bb70f-0aad-4f14-809e-1f39b78c97b8","Type":"ContainerStarted","Data":"e70ce0dfbad86a21c1e8504bc37fad18ee492be593f835c5a1a3e3983174cd0d"} Dec 05 12:49:49 crc kubenswrapper[4784]: I1205 12:49:49.014328 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b" path="/var/lib/kubelet/pods/b9fd9278-0aaf-4bf3-8753-f21a2fe15f3b/volumes" Dec 05 12:49:49 crc kubenswrapper[4784]: I1205 12:49:49.015997 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9889e9e-8ec4-44aa-a829-327920ab827f" path="/var/lib/kubelet/pods/e9889e9e-8ec4-44aa-a829-327920ab827f/volumes" Dec 05 12:49:49 crc kubenswrapper[4784]: I1205 12:49:49.125799 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 12:49:50 crc kubenswrapper[4784]: I1205 12:49:49.999747 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"94f3bf83-4b17-4dbc-aed9-b0541983c0b8","Type":"ContainerStarted","Data":"bd5c69126e7c00306bf6df13c40fe2c6ee2b3e1b591d4c8612bd1234846ff41a"} Dec 05 12:49:51 crc kubenswrapper[4784]: I1205 12:49:51.018316 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"1a3bb70f-0aad-4f14-809e-1f39b78c97b8","Type":"ContainerStarted","Data":"297aaad636978d492a98954373055b46465f8c8cd8ba3c551aa5dde0e35a21e9"} Dec 05 12:49:51 crc kubenswrapper[4784]: I1205 12:49:51.019098 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"94f3bf83-4b17-4dbc-aed9-b0541983c0b8","Type":"ContainerStarted","Data":"34631d0f50bdceac6bda07cedd65110cec8defe1a16533dd52823382738e2830"} Dec 05 12:49:56 crc kubenswrapper[4784]: I1205 12:49:56.767951 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5d5d69c675-5xh8d"] Dec 05 12:49:56 crc kubenswrapper[4784]: I1205 12:49:56.770284 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5d5d69c675-5xh8d" Dec 05 12:49:56 crc kubenswrapper[4784]: I1205 12:49:56.776114 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Dec 05 12:49:56 crc kubenswrapper[4784]: I1205 12:49:56.786754 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d5d69c675-5xh8d"] Dec 05 12:49:56 crc kubenswrapper[4784]: I1205 12:49:56.915487 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b-config\") pod \"dnsmasq-dns-5d5d69c675-5xh8d\" (UID: \"a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b\") " pod="openstack/dnsmasq-dns-5d5d69c675-5xh8d" Dec 05 12:49:56 crc kubenswrapper[4784]: I1205 12:49:56.915541 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25sw4\" (UniqueName: \"kubernetes.io/projected/a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b-kube-api-access-25sw4\") pod \"dnsmasq-dns-5d5d69c675-5xh8d\" (UID: \"a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b\") " pod="openstack/dnsmasq-dns-5d5d69c675-5xh8d" Dec 05 12:49:56 crc kubenswrapper[4784]: I1205 12:49:56.915570 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b-openstack-edpm-ipam\") pod \"dnsmasq-dns-5d5d69c675-5xh8d\" (UID: \"a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b\") " pod="openstack/dnsmasq-dns-5d5d69c675-5xh8d" Dec 05 12:49:56 crc kubenswrapper[4784]: I1205 12:49:56.915609 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b-ovsdbserver-sb\") pod \"dnsmasq-dns-5d5d69c675-5xh8d\" (UID: \"a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b\") " pod="openstack/dnsmasq-dns-5d5d69c675-5xh8d" Dec 05 12:49:56 crc kubenswrapper[4784]: I1205 12:49:56.915727 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b-ovsdbserver-nb\") pod \"dnsmasq-dns-5d5d69c675-5xh8d\" (UID: \"a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b\") " pod="openstack/dnsmasq-dns-5d5d69c675-5xh8d" Dec 05 12:49:56 crc kubenswrapper[4784]: I1205 12:49:56.915788 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b-dns-swift-storage-0\") pod \"dnsmasq-dns-5d5d69c675-5xh8d\" (UID: \"a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b\") " pod="openstack/dnsmasq-dns-5d5d69c675-5xh8d" Dec 05 12:49:56 crc kubenswrapper[4784]: I1205 12:49:56.915816 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b-dns-svc\") pod \"dnsmasq-dns-5d5d69c675-5xh8d\" (UID: \"a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b\") " pod="openstack/dnsmasq-dns-5d5d69c675-5xh8d" Dec 05 12:49:57 crc kubenswrapper[4784]: I1205 12:49:57.018157 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b-config\") pod \"dnsmasq-dns-5d5d69c675-5xh8d\" (UID: 
\"a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b\") " pod="openstack/dnsmasq-dns-5d5d69c675-5xh8d" Dec 05 12:49:57 crc kubenswrapper[4784]: I1205 12:49:57.018244 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25sw4\" (UniqueName: \"kubernetes.io/projected/a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b-kube-api-access-25sw4\") pod \"dnsmasq-dns-5d5d69c675-5xh8d\" (UID: \"a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b\") " pod="openstack/dnsmasq-dns-5d5d69c675-5xh8d" Dec 05 12:49:57 crc kubenswrapper[4784]: I1205 12:49:57.018271 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b-openstack-edpm-ipam\") pod \"dnsmasq-dns-5d5d69c675-5xh8d\" (UID: \"a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b\") " pod="openstack/dnsmasq-dns-5d5d69c675-5xh8d" Dec 05 12:49:57 crc kubenswrapper[4784]: I1205 12:49:57.018309 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b-ovsdbserver-sb\") pod \"dnsmasq-dns-5d5d69c675-5xh8d\" (UID: \"a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b\") " pod="openstack/dnsmasq-dns-5d5d69c675-5xh8d" Dec 05 12:49:57 crc kubenswrapper[4784]: I1205 12:49:57.018434 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b-ovsdbserver-nb\") pod \"dnsmasq-dns-5d5d69c675-5xh8d\" (UID: \"a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b\") " pod="openstack/dnsmasq-dns-5d5d69c675-5xh8d" Dec 05 12:49:57 crc kubenswrapper[4784]: I1205 12:49:57.018491 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b-dns-swift-storage-0\") pod \"dnsmasq-dns-5d5d69c675-5xh8d\" (UID: \"a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b\") " pod="openstack/dnsmasq-dns-5d5d69c675-5xh8d" Dec 05 12:49:57 crc kubenswrapper[4784]: I1205 12:49:57.018522 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b-dns-svc\") pod \"dnsmasq-dns-5d5d69c675-5xh8d\" (UID: \"a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b\") " pod="openstack/dnsmasq-dns-5d5d69c675-5xh8d" Dec 05 12:49:57 crc kubenswrapper[4784]: I1205 12:49:57.019141 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b-openstack-edpm-ipam\") pod \"dnsmasq-dns-5d5d69c675-5xh8d\" (UID: \"a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b\") " pod="openstack/dnsmasq-dns-5d5d69c675-5xh8d" Dec 05 12:49:57 crc kubenswrapper[4784]: I1205 12:49:57.019163 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b-ovsdbserver-nb\") pod \"dnsmasq-dns-5d5d69c675-5xh8d\" (UID: \"a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b\") " pod="openstack/dnsmasq-dns-5d5d69c675-5xh8d" Dec 05 12:49:57 crc kubenswrapper[4784]: I1205 12:49:57.019568 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b-dns-svc\") pod \"dnsmasq-dns-5d5d69c675-5xh8d\" (UID: \"a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b\") " 
pod="openstack/dnsmasq-dns-5d5d69c675-5xh8d" Dec 05 12:49:57 crc kubenswrapper[4784]: I1205 12:49:57.019768 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b-ovsdbserver-sb\") pod \"dnsmasq-dns-5d5d69c675-5xh8d\" (UID: \"a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b\") " pod="openstack/dnsmasq-dns-5d5d69c675-5xh8d" Dec 05 12:49:57 crc kubenswrapper[4784]: I1205 12:49:57.019808 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b-dns-swift-storage-0\") pod \"dnsmasq-dns-5d5d69c675-5xh8d\" (UID: \"a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b\") " pod="openstack/dnsmasq-dns-5d5d69c675-5xh8d" Dec 05 12:49:57 crc kubenswrapper[4784]: I1205 12:49:57.020259 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b-config\") pod \"dnsmasq-dns-5d5d69c675-5xh8d\" (UID: \"a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b\") " pod="openstack/dnsmasq-dns-5d5d69c675-5xh8d" Dec 05 12:49:57 crc kubenswrapper[4784]: I1205 12:49:57.042058 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-25sw4\" (UniqueName: \"kubernetes.io/projected/a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b-kube-api-access-25sw4\") pod \"dnsmasq-dns-5d5d69c675-5xh8d\" (UID: \"a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b\") " pod="openstack/dnsmasq-dns-5d5d69c675-5xh8d" Dec 05 12:49:57 crc kubenswrapper[4784]: I1205 12:49:57.095634 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d5d69c675-5xh8d" Dec 05 12:49:57 crc kubenswrapper[4784]: I1205 12:49:57.560419 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d5d69c675-5xh8d"] Dec 05 12:49:58 crc kubenswrapper[4784]: I1205 12:49:58.087758 4784 generic.go:334] "Generic (PLEG): container finished" podID="a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b" containerID="d429f8e576a87219e72979ad510b25bb20f2033eafae69265e04616168a0db2e" exitCode=0 Dec 05 12:49:58 crc kubenswrapper[4784]: I1205 12:49:58.088138 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d5d69c675-5xh8d" event={"ID":"a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b","Type":"ContainerDied","Data":"d429f8e576a87219e72979ad510b25bb20f2033eafae69265e04616168a0db2e"} Dec 05 12:49:58 crc kubenswrapper[4784]: I1205 12:49:58.088226 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d5d69c675-5xh8d" event={"ID":"a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b","Type":"ContainerStarted","Data":"6f44c365c7790b54cab29b2ae1a50e241a5fea0a555f393589b7d892c50cd939"} Dec 05 12:49:59 crc kubenswrapper[4784]: I1205 12:49:59.104845 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d5d69c675-5xh8d" event={"ID":"a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b","Type":"ContainerStarted","Data":"71e01c7a7c0b9a03eade199a6d9c092e6be85aa8153e328e22b810a9dbe93e98"} Dec 05 12:49:59 crc kubenswrapper[4784]: I1205 12:49:59.105216 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5d5d69c675-5xh8d" Dec 05 12:49:59 crc kubenswrapper[4784]: I1205 12:49:59.137133 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5d5d69c675-5xh8d" podStartSLOduration=3.137099692 podStartE2EDuration="3.137099692s" 
podCreationTimestamp="2025-12-05 12:49:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:49:59.128925305 +0000 UTC m=+1478.548992150" watchObservedRunningTime="2025-12-05 12:49:59.137099692 +0000 UTC m=+1478.557166547" Dec 05 12:49:59 crc kubenswrapper[4784]: I1205 12:49:59.572380 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:49:59 crc kubenswrapper[4784]: I1205 12:49:59.572722 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:50:05 crc kubenswrapper[4784]: I1205 12:50:05.822931 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-z5hfj"] Dec 05 12:50:05 crc kubenswrapper[4784]: I1205 12:50:05.825586 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-z5hfj" Dec 05 12:50:05 crc kubenswrapper[4784]: I1205 12:50:05.898925 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6ff04d0-052e-46b7-aeec-4e2ea4338b11-utilities\") pod \"certified-operators-z5hfj\" (UID: \"a6ff04d0-052e-46b7-aeec-4e2ea4338b11\") " pod="openshift-marketplace/certified-operators-z5hfj" Dec 05 12:50:05 crc kubenswrapper[4784]: I1205 12:50:05.899032 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qws7n\" (UniqueName: \"kubernetes.io/projected/a6ff04d0-052e-46b7-aeec-4e2ea4338b11-kube-api-access-qws7n\") pod \"certified-operators-z5hfj\" (UID: \"a6ff04d0-052e-46b7-aeec-4e2ea4338b11\") " pod="openshift-marketplace/certified-operators-z5hfj" Dec 05 12:50:05 crc kubenswrapper[4784]: I1205 12:50:05.899067 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6ff04d0-052e-46b7-aeec-4e2ea4338b11-catalog-content\") pod \"certified-operators-z5hfj\" (UID: \"a6ff04d0-052e-46b7-aeec-4e2ea4338b11\") " pod="openshift-marketplace/certified-operators-z5hfj" Dec 05 12:50:05 crc kubenswrapper[4784]: I1205 12:50:05.976806 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-z5hfj"] Dec 05 12:50:06 crc kubenswrapper[4784]: I1205 12:50:06.000159 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qws7n\" (UniqueName: \"kubernetes.io/projected/a6ff04d0-052e-46b7-aeec-4e2ea4338b11-kube-api-access-qws7n\") pod \"certified-operators-z5hfj\" (UID: \"a6ff04d0-052e-46b7-aeec-4e2ea4338b11\") " pod="openshift-marketplace/certified-operators-z5hfj" Dec 05 12:50:06 crc kubenswrapper[4784]: I1205 12:50:06.000218 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6ff04d0-052e-46b7-aeec-4e2ea4338b11-catalog-content\") pod \"certified-operators-z5hfj\" 
(UID: \"a6ff04d0-052e-46b7-aeec-4e2ea4338b11\") " pod="openshift-marketplace/certified-operators-z5hfj" Dec 05 12:50:06 crc kubenswrapper[4784]: I1205 12:50:06.000330 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6ff04d0-052e-46b7-aeec-4e2ea4338b11-utilities\") pod \"certified-operators-z5hfj\" (UID: \"a6ff04d0-052e-46b7-aeec-4e2ea4338b11\") " pod="openshift-marketplace/certified-operators-z5hfj" Dec 05 12:50:06 crc kubenswrapper[4784]: I1205 12:50:06.001157 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6ff04d0-052e-46b7-aeec-4e2ea4338b11-utilities\") pod \"certified-operators-z5hfj\" (UID: \"a6ff04d0-052e-46b7-aeec-4e2ea4338b11\") " pod="openshift-marketplace/certified-operators-z5hfj" Dec 05 12:50:06 crc kubenswrapper[4784]: I1205 12:50:06.001903 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6ff04d0-052e-46b7-aeec-4e2ea4338b11-catalog-content\") pod \"certified-operators-z5hfj\" (UID: \"a6ff04d0-052e-46b7-aeec-4e2ea4338b11\") " pod="openshift-marketplace/certified-operators-z5hfj" Dec 05 12:50:06 crc kubenswrapper[4784]: I1205 12:50:06.021917 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qws7n\" (UniqueName: \"kubernetes.io/projected/a6ff04d0-052e-46b7-aeec-4e2ea4338b11-kube-api-access-qws7n\") pod \"certified-operators-z5hfj\" (UID: \"a6ff04d0-052e-46b7-aeec-4e2ea4338b11\") " pod="openshift-marketplace/certified-operators-z5hfj" Dec 05 12:50:06 crc kubenswrapper[4784]: I1205 12:50:06.159124 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-z5hfj" Dec 05 12:50:06 crc kubenswrapper[4784]: I1205 12:50:06.699267 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-z5hfj"] Dec 05 12:50:06 crc kubenswrapper[4784]: W1205 12:50:06.706120 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda6ff04d0_052e_46b7_aeec_4e2ea4338b11.slice/crio-e0ae4767f1c3823b062c32668a810bb51c1d4b30ce11f6c7b6585d22683515c4 WatchSource:0}: Error finding container e0ae4767f1c3823b062c32668a810bb51c1d4b30ce11f6c7b6585d22683515c4: Status 404 returned error can't find the container with id e0ae4767f1c3823b062c32668a810bb51c1d4b30ce11f6c7b6585d22683515c4 Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.097551 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5d5d69c675-5xh8d" Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.166027 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-54fbd6cd5c-xzczx"] Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.166683 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-54fbd6cd5c-xzczx" podUID="0387d9db-dbb7-404e-991b-f67a7f02a1bf" containerName="dnsmasq-dns" containerID="cri-o://c00ec124e15b8ce5e296b4f83511187a5157ce5d367105f4006dafdc8aa7cf0a" gracePeriod=10 Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.199272 4784 generic.go:334] "Generic (PLEG): container finished" podID="a6ff04d0-052e-46b7-aeec-4e2ea4338b11" containerID="f0915f7efc9fc2c78a4b94f530785be545830a65736ae8830d3506db8a02cd2f" exitCode=0 Dec 05 12:50:07 crc 
kubenswrapper[4784]: I1205 12:50:07.199330 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z5hfj" event={"ID":"a6ff04d0-052e-46b7-aeec-4e2ea4338b11","Type":"ContainerDied","Data":"f0915f7efc9fc2c78a4b94f530785be545830a65736ae8830d3506db8a02cd2f"} Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.199359 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z5hfj" event={"ID":"a6ff04d0-052e-46b7-aeec-4e2ea4338b11","Type":"ContainerStarted","Data":"e0ae4767f1c3823b062c32668a810bb51c1d4b30ce11f6c7b6585d22683515c4"} Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.390952 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5fb487c899-nw4wf"] Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.402809 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5fb487c899-nw4wf" Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.416643 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5fb487c899-nw4wf"] Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.535313 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4d1b8599-38bc-4f76-aaa6-4a18929bffba-dns-swift-storage-0\") pod \"dnsmasq-dns-5fb487c899-nw4wf\" (UID: \"4d1b8599-38bc-4f76-aaa6-4a18929bffba\") " pod="openstack/dnsmasq-dns-5fb487c899-nw4wf" Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.535384 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4d1b8599-38bc-4f76-aaa6-4a18929bffba-ovsdbserver-sb\") pod \"dnsmasq-dns-5fb487c899-nw4wf\" (UID: \"4d1b8599-38bc-4f76-aaa6-4a18929bffba\") " pod="openstack/dnsmasq-dns-5fb487c899-nw4wf" Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.535431 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4d1b8599-38bc-4f76-aaa6-4a18929bffba-config\") pod \"dnsmasq-dns-5fb487c899-nw4wf\" (UID: \"4d1b8599-38bc-4f76-aaa6-4a18929bffba\") " pod="openstack/dnsmasq-dns-5fb487c899-nw4wf" Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.535491 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4d1b8599-38bc-4f76-aaa6-4a18929bffba-ovsdbserver-nb\") pod \"dnsmasq-dns-5fb487c899-nw4wf\" (UID: \"4d1b8599-38bc-4f76-aaa6-4a18929bffba\") " pod="openstack/dnsmasq-dns-5fb487c899-nw4wf" Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.535534 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9nq8\" (UniqueName: \"kubernetes.io/projected/4d1b8599-38bc-4f76-aaa6-4a18929bffba-kube-api-access-w9nq8\") pod \"dnsmasq-dns-5fb487c899-nw4wf\" (UID: \"4d1b8599-38bc-4f76-aaa6-4a18929bffba\") " pod="openstack/dnsmasq-dns-5fb487c899-nw4wf" Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.535559 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4d1b8599-38bc-4f76-aaa6-4a18929bffba-dns-svc\") pod \"dnsmasq-dns-5fb487c899-nw4wf\" (UID: \"4d1b8599-38bc-4f76-aaa6-4a18929bffba\") " 
pod="openstack/dnsmasq-dns-5fb487c899-nw4wf" Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.535931 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/4d1b8599-38bc-4f76-aaa6-4a18929bffba-openstack-edpm-ipam\") pod \"dnsmasq-dns-5fb487c899-nw4wf\" (UID: \"4d1b8599-38bc-4f76-aaa6-4a18929bffba\") " pod="openstack/dnsmasq-dns-5fb487c899-nw4wf" Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.637982 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4d1b8599-38bc-4f76-aaa6-4a18929bffba-ovsdbserver-nb\") pod \"dnsmasq-dns-5fb487c899-nw4wf\" (UID: \"4d1b8599-38bc-4f76-aaa6-4a18929bffba\") " pod="openstack/dnsmasq-dns-5fb487c899-nw4wf" Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.638046 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9nq8\" (UniqueName: \"kubernetes.io/projected/4d1b8599-38bc-4f76-aaa6-4a18929bffba-kube-api-access-w9nq8\") pod \"dnsmasq-dns-5fb487c899-nw4wf\" (UID: \"4d1b8599-38bc-4f76-aaa6-4a18929bffba\") " pod="openstack/dnsmasq-dns-5fb487c899-nw4wf" Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.638072 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4d1b8599-38bc-4f76-aaa6-4a18929bffba-dns-svc\") pod \"dnsmasq-dns-5fb487c899-nw4wf\" (UID: \"4d1b8599-38bc-4f76-aaa6-4a18929bffba\") " pod="openstack/dnsmasq-dns-5fb487c899-nw4wf" Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.638159 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/4d1b8599-38bc-4f76-aaa6-4a18929bffba-openstack-edpm-ipam\") pod \"dnsmasq-dns-5fb487c899-nw4wf\" (UID: \"4d1b8599-38bc-4f76-aaa6-4a18929bffba\") " pod="openstack/dnsmasq-dns-5fb487c899-nw4wf" Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.638226 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4d1b8599-38bc-4f76-aaa6-4a18929bffba-dns-swift-storage-0\") pod \"dnsmasq-dns-5fb487c899-nw4wf\" (UID: \"4d1b8599-38bc-4f76-aaa6-4a18929bffba\") " pod="openstack/dnsmasq-dns-5fb487c899-nw4wf" Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.638256 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4d1b8599-38bc-4f76-aaa6-4a18929bffba-ovsdbserver-sb\") pod \"dnsmasq-dns-5fb487c899-nw4wf\" (UID: \"4d1b8599-38bc-4f76-aaa6-4a18929bffba\") " pod="openstack/dnsmasq-dns-5fb487c899-nw4wf" Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.638399 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4d1b8599-38bc-4f76-aaa6-4a18929bffba-config\") pod \"dnsmasq-dns-5fb487c899-nw4wf\" (UID: \"4d1b8599-38bc-4f76-aaa6-4a18929bffba\") " pod="openstack/dnsmasq-dns-5fb487c899-nw4wf" Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.639771 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/4d1b8599-38bc-4f76-aaa6-4a18929bffba-openstack-edpm-ipam\") pod \"dnsmasq-dns-5fb487c899-nw4wf\" (UID: \"4d1b8599-38bc-4f76-aaa6-4a18929bffba\") " 
pod="openstack/dnsmasq-dns-5fb487c899-nw4wf" Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.640125 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4d1b8599-38bc-4f76-aaa6-4a18929bffba-ovsdbserver-sb\") pod \"dnsmasq-dns-5fb487c899-nw4wf\" (UID: \"4d1b8599-38bc-4f76-aaa6-4a18929bffba\") " pod="openstack/dnsmasq-dns-5fb487c899-nw4wf" Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.640314 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4d1b8599-38bc-4f76-aaa6-4a18929bffba-dns-swift-storage-0\") pod \"dnsmasq-dns-5fb487c899-nw4wf\" (UID: \"4d1b8599-38bc-4f76-aaa6-4a18929bffba\") " pod="openstack/dnsmasq-dns-5fb487c899-nw4wf" Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.640471 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4d1b8599-38bc-4f76-aaa6-4a18929bffba-ovsdbserver-nb\") pod \"dnsmasq-dns-5fb487c899-nw4wf\" (UID: \"4d1b8599-38bc-4f76-aaa6-4a18929bffba\") " pod="openstack/dnsmasq-dns-5fb487c899-nw4wf" Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.640518 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4d1b8599-38bc-4f76-aaa6-4a18929bffba-dns-svc\") pod \"dnsmasq-dns-5fb487c899-nw4wf\" (UID: \"4d1b8599-38bc-4f76-aaa6-4a18929bffba\") " pod="openstack/dnsmasq-dns-5fb487c899-nw4wf" Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.642084 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4d1b8599-38bc-4f76-aaa6-4a18929bffba-config\") pod \"dnsmasq-dns-5fb487c899-nw4wf\" (UID: \"4d1b8599-38bc-4f76-aaa6-4a18929bffba\") " pod="openstack/dnsmasq-dns-5fb487c899-nw4wf" Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.661533 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9nq8\" (UniqueName: \"kubernetes.io/projected/4d1b8599-38bc-4f76-aaa6-4a18929bffba-kube-api-access-w9nq8\") pod \"dnsmasq-dns-5fb487c899-nw4wf\" (UID: \"4d1b8599-38bc-4f76-aaa6-4a18929bffba\") " pod="openstack/dnsmasq-dns-5fb487c899-nw4wf" Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.732272 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5fb487c899-nw4wf" Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.746068 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-54fbd6cd5c-xzczx" Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.843101 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0387d9db-dbb7-404e-991b-f67a7f02a1bf-ovsdbserver-sb\") pod \"0387d9db-dbb7-404e-991b-f67a7f02a1bf\" (UID: \"0387d9db-dbb7-404e-991b-f67a7f02a1bf\") " Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.843334 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0387d9db-dbb7-404e-991b-f67a7f02a1bf-config\") pod \"0387d9db-dbb7-404e-991b-f67a7f02a1bf\" (UID: \"0387d9db-dbb7-404e-991b-f67a7f02a1bf\") " Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.843405 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0387d9db-dbb7-404e-991b-f67a7f02a1bf-dns-swift-storage-0\") pod \"0387d9db-dbb7-404e-991b-f67a7f02a1bf\" (UID: \"0387d9db-dbb7-404e-991b-f67a7f02a1bf\") " Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.843477 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mdzvf\" (UniqueName: \"kubernetes.io/projected/0387d9db-dbb7-404e-991b-f67a7f02a1bf-kube-api-access-mdzvf\") pod \"0387d9db-dbb7-404e-991b-f67a7f02a1bf\" (UID: \"0387d9db-dbb7-404e-991b-f67a7f02a1bf\") " Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.843522 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0387d9db-dbb7-404e-991b-f67a7f02a1bf-ovsdbserver-nb\") pod \"0387d9db-dbb7-404e-991b-f67a7f02a1bf\" (UID: \"0387d9db-dbb7-404e-991b-f67a7f02a1bf\") " Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.843571 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0387d9db-dbb7-404e-991b-f67a7f02a1bf-dns-svc\") pod \"0387d9db-dbb7-404e-991b-f67a7f02a1bf\" (UID: \"0387d9db-dbb7-404e-991b-f67a7f02a1bf\") " Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.849277 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0387d9db-dbb7-404e-991b-f67a7f02a1bf-kube-api-access-mdzvf" (OuterVolumeSpecName: "kube-api-access-mdzvf") pod "0387d9db-dbb7-404e-991b-f67a7f02a1bf" (UID: "0387d9db-dbb7-404e-991b-f67a7f02a1bf"). InnerVolumeSpecName "kube-api-access-mdzvf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.905318 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0387d9db-dbb7-404e-991b-f67a7f02a1bf-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "0387d9db-dbb7-404e-991b-f67a7f02a1bf" (UID: "0387d9db-dbb7-404e-991b-f67a7f02a1bf"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.916704 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0387d9db-dbb7-404e-991b-f67a7f02a1bf-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "0387d9db-dbb7-404e-991b-f67a7f02a1bf" (UID: "0387d9db-dbb7-404e-991b-f67a7f02a1bf"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.920971 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0387d9db-dbb7-404e-991b-f67a7f02a1bf-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "0387d9db-dbb7-404e-991b-f67a7f02a1bf" (UID: "0387d9db-dbb7-404e-991b-f67a7f02a1bf"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.924744 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0387d9db-dbb7-404e-991b-f67a7f02a1bf-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "0387d9db-dbb7-404e-991b-f67a7f02a1bf" (UID: "0387d9db-dbb7-404e-991b-f67a7f02a1bf"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.937447 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0387d9db-dbb7-404e-991b-f67a7f02a1bf-config" (OuterVolumeSpecName: "config") pod "0387d9db-dbb7-404e-991b-f67a7f02a1bf" (UID: "0387d9db-dbb7-404e-991b-f67a7f02a1bf"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.945943 4784 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0387d9db-dbb7-404e-991b-f67a7f02a1bf-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.945994 4784 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0387d9db-dbb7-404e-991b-f67a7f02a1bf-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.946008 4784 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0387d9db-dbb7-404e-991b-f67a7f02a1bf-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.946021 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mdzvf\" (UniqueName: \"kubernetes.io/projected/0387d9db-dbb7-404e-991b-f67a7f02a1bf-kube-api-access-mdzvf\") on node \"crc\" DevicePath \"\"" Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.946034 4784 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0387d9db-dbb7-404e-991b-f67a7f02a1bf-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 12:50:07 crc kubenswrapper[4784]: I1205 12:50:07.946048 4784 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0387d9db-dbb7-404e-991b-f67a7f02a1bf-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 12:50:08 crc kubenswrapper[4784]: I1205 12:50:08.213549 4784 generic.go:334] "Generic (PLEG): container finished" podID="0387d9db-dbb7-404e-991b-f67a7f02a1bf" containerID="c00ec124e15b8ce5e296b4f83511187a5157ce5d367105f4006dafdc8aa7cf0a" exitCode=0 Dec 05 12:50:08 crc kubenswrapper[4784]: I1205 12:50:08.213621 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-54fbd6cd5c-xzczx" Dec 05 12:50:08 crc kubenswrapper[4784]: I1205 12:50:08.213641 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54fbd6cd5c-xzczx" event={"ID":"0387d9db-dbb7-404e-991b-f67a7f02a1bf","Type":"ContainerDied","Data":"c00ec124e15b8ce5e296b4f83511187a5157ce5d367105f4006dafdc8aa7cf0a"} Dec 05 12:50:08 crc kubenswrapper[4784]: I1205 12:50:08.214014 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54fbd6cd5c-xzczx" event={"ID":"0387d9db-dbb7-404e-991b-f67a7f02a1bf","Type":"ContainerDied","Data":"8c2e179c09a3fc0bf08d2f1257b43ad15e554a9b9edff3aec6185eebf89314f6"} Dec 05 12:50:08 crc kubenswrapper[4784]: I1205 12:50:08.214036 4784 scope.go:117] "RemoveContainer" containerID="c00ec124e15b8ce5e296b4f83511187a5157ce5d367105f4006dafdc8aa7cf0a" Dec 05 12:50:08 crc kubenswrapper[4784]: I1205 12:50:08.241935 4784 scope.go:117] "RemoveContainer" containerID="a5c904bcbfa7920c35d530096a3d0c60bdc0c8be3fd4bffcbdb3389d990e6fec" Dec 05 12:50:08 crc kubenswrapper[4784]: W1205 12:50:08.256253 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4d1b8599_38bc_4f76_aaa6_4a18929bffba.slice/crio-59b047bbb91e86e56ae5ebbbba482fa05e7c11a3d6c3ba84610516fba542afc4 WatchSource:0}: Error finding container 59b047bbb91e86e56ae5ebbbba482fa05e7c11a3d6c3ba84610516fba542afc4: Status 404 returned error can't find the container with id 59b047bbb91e86e56ae5ebbbba482fa05e7c11a3d6c3ba84610516fba542afc4 Dec 05 12:50:08 crc kubenswrapper[4784]: I1205 12:50:08.257102 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5fb487c899-nw4wf"] Dec 05 12:50:08 crc kubenswrapper[4784]: I1205 12:50:08.267096 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-54fbd6cd5c-xzczx"] Dec 05 12:50:08 crc kubenswrapper[4784]: I1205 12:50:08.277485 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-54fbd6cd5c-xzczx"] Dec 05 12:50:08 crc kubenswrapper[4784]: I1205 12:50:08.289929 4784 scope.go:117] "RemoveContainer" containerID="c00ec124e15b8ce5e296b4f83511187a5157ce5d367105f4006dafdc8aa7cf0a" Dec 05 12:50:08 crc kubenswrapper[4784]: E1205 12:50:08.290733 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c00ec124e15b8ce5e296b4f83511187a5157ce5d367105f4006dafdc8aa7cf0a\": container with ID starting with c00ec124e15b8ce5e296b4f83511187a5157ce5d367105f4006dafdc8aa7cf0a not found: ID does not exist" containerID="c00ec124e15b8ce5e296b4f83511187a5157ce5d367105f4006dafdc8aa7cf0a" Dec 05 12:50:08 crc kubenswrapper[4784]: I1205 12:50:08.290804 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c00ec124e15b8ce5e296b4f83511187a5157ce5d367105f4006dafdc8aa7cf0a"} err="failed to get container status \"c00ec124e15b8ce5e296b4f83511187a5157ce5d367105f4006dafdc8aa7cf0a\": rpc error: code = NotFound desc = could not find container \"c00ec124e15b8ce5e296b4f83511187a5157ce5d367105f4006dafdc8aa7cf0a\": container with ID starting with c00ec124e15b8ce5e296b4f83511187a5157ce5d367105f4006dafdc8aa7cf0a not found: ID does not exist" Dec 05 12:50:08 crc kubenswrapper[4784]: I1205 12:50:08.290847 4784 scope.go:117] "RemoveContainer" containerID="a5c904bcbfa7920c35d530096a3d0c60bdc0c8be3fd4bffcbdb3389d990e6fec" Dec 05 12:50:08 crc kubenswrapper[4784]: E1205 
12:50:08.291756 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a5c904bcbfa7920c35d530096a3d0c60bdc0c8be3fd4bffcbdb3389d990e6fec\": container with ID starting with a5c904bcbfa7920c35d530096a3d0c60bdc0c8be3fd4bffcbdb3389d990e6fec not found: ID does not exist" containerID="a5c904bcbfa7920c35d530096a3d0c60bdc0c8be3fd4bffcbdb3389d990e6fec" Dec 05 12:50:08 crc kubenswrapper[4784]: I1205 12:50:08.291827 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5c904bcbfa7920c35d530096a3d0c60bdc0c8be3fd4bffcbdb3389d990e6fec"} err="failed to get container status \"a5c904bcbfa7920c35d530096a3d0c60bdc0c8be3fd4bffcbdb3389d990e6fec\": rpc error: code = NotFound desc = could not find container \"a5c904bcbfa7920c35d530096a3d0c60bdc0c8be3fd4bffcbdb3389d990e6fec\": container with ID starting with a5c904bcbfa7920c35d530096a3d0c60bdc0c8be3fd4bffcbdb3389d990e6fec not found: ID does not exist" Dec 05 12:50:09 crc kubenswrapper[4784]: I1205 12:50:09.009988 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0387d9db-dbb7-404e-991b-f67a7f02a1bf" path="/var/lib/kubelet/pods/0387d9db-dbb7-404e-991b-f67a7f02a1bf/volumes" Dec 05 12:50:09 crc kubenswrapper[4784]: I1205 12:50:09.222936 4784 generic.go:334] "Generic (PLEG): container finished" podID="4d1b8599-38bc-4f76-aaa6-4a18929bffba" containerID="f275779a10ad000aee7e205b511020dff5187b0e69bd483d365bc92900964b66" exitCode=0 Dec 05 12:50:09 crc kubenswrapper[4784]: I1205 12:50:09.223031 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5fb487c899-nw4wf" event={"ID":"4d1b8599-38bc-4f76-aaa6-4a18929bffba","Type":"ContainerDied","Data":"f275779a10ad000aee7e205b511020dff5187b0e69bd483d365bc92900964b66"} Dec 05 12:50:09 crc kubenswrapper[4784]: I1205 12:50:09.223092 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5fb487c899-nw4wf" event={"ID":"4d1b8599-38bc-4f76-aaa6-4a18929bffba","Type":"ContainerStarted","Data":"59b047bbb91e86e56ae5ebbbba482fa05e7c11a3d6c3ba84610516fba542afc4"} Dec 05 12:50:09 crc kubenswrapper[4784]: I1205 12:50:09.227859 4784 generic.go:334] "Generic (PLEG): container finished" podID="a6ff04d0-052e-46b7-aeec-4e2ea4338b11" containerID="d9768f295ffe585fb0eb31c600596d90ec3e272ccf70efa5644b5d2b1af10012" exitCode=0 Dec 05 12:50:09 crc kubenswrapper[4784]: I1205 12:50:09.227890 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z5hfj" event={"ID":"a6ff04d0-052e-46b7-aeec-4e2ea4338b11","Type":"ContainerDied","Data":"d9768f295ffe585fb0eb31c600596d90ec3e272ccf70efa5644b5d2b1af10012"} Dec 05 12:50:10 crc kubenswrapper[4784]: I1205 12:50:10.246578 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z5hfj" event={"ID":"a6ff04d0-052e-46b7-aeec-4e2ea4338b11","Type":"ContainerStarted","Data":"719ed70d1d4f48fc4d3db715a51a17e0da2e164375700a54ccc2b26ef9b774e7"} Dec 05 12:50:10 crc kubenswrapper[4784]: I1205 12:50:10.251263 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5fb487c899-nw4wf" event={"ID":"4d1b8599-38bc-4f76-aaa6-4a18929bffba","Type":"ContainerStarted","Data":"cf9fbc8e12e9f2a87bd7ca9833c6c3c8ac7e63dc028bc668bf1ad15fb4dcf99a"} Dec 05 12:50:10 crc kubenswrapper[4784]: I1205 12:50:10.251573 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5fb487c899-nw4wf" Dec 05 12:50:10 
crc kubenswrapper[4784]: I1205 12:50:10.271127 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-z5hfj" podStartSLOduration=2.838273468 podStartE2EDuration="5.271110944s" podCreationTimestamp="2025-12-05 12:50:05 +0000 UTC" firstStartedPulling="2025-12-05 12:50:07.201324204 +0000 UTC m=+1486.621391019" lastFinishedPulling="2025-12-05 12:50:09.63416167 +0000 UTC m=+1489.054228495" observedRunningTime="2025-12-05 12:50:10.264730345 +0000 UTC m=+1489.684797170" watchObservedRunningTime="2025-12-05 12:50:10.271110944 +0000 UTC m=+1489.691177759" Dec 05 12:50:10 crc kubenswrapper[4784]: I1205 12:50:10.288395 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5fb487c899-nw4wf" podStartSLOduration=3.288373492 podStartE2EDuration="3.288373492s" podCreationTimestamp="2025-12-05 12:50:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:50:10.283289923 +0000 UTC m=+1489.703356738" watchObservedRunningTime="2025-12-05 12:50:10.288373492 +0000 UTC m=+1489.708440317" Dec 05 12:50:16 crc kubenswrapper[4784]: I1205 12:50:16.159529 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-z5hfj" Dec 05 12:50:16 crc kubenswrapper[4784]: I1205 12:50:16.160693 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-z5hfj" Dec 05 12:50:16 crc kubenswrapper[4784]: I1205 12:50:16.237934 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-z5hfj" Dec 05 12:50:16 crc kubenswrapper[4784]: I1205 12:50:16.371159 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-z5hfj" Dec 05 12:50:16 crc kubenswrapper[4784]: I1205 12:50:16.493163 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-z5hfj"] Dec 05 12:50:17 crc kubenswrapper[4784]: I1205 12:50:17.734135 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5fb487c899-nw4wf" Dec 05 12:50:17 crc kubenswrapper[4784]: I1205 12:50:17.806402 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d5d69c675-5xh8d"] Dec 05 12:50:17 crc kubenswrapper[4784]: I1205 12:50:17.806706 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5d5d69c675-5xh8d" podUID="a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b" containerName="dnsmasq-dns" containerID="cri-o://71e01c7a7c0b9a03eade199a6d9c092e6be85aa8153e328e22b810a9dbe93e98" gracePeriod=10 Dec 05 12:50:18 crc kubenswrapper[4784]: I1205 12:50:18.336734 4784 generic.go:334] "Generic (PLEG): container finished" podID="a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b" containerID="71e01c7a7c0b9a03eade199a6d9c092e6be85aa8153e328e22b810a9dbe93e98" exitCode=0 Dec 05 12:50:18 crc kubenswrapper[4784]: I1205 12:50:18.337308 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-z5hfj" podUID="a6ff04d0-052e-46b7-aeec-4e2ea4338b11" containerName="registry-server" containerID="cri-o://719ed70d1d4f48fc4d3db715a51a17e0da2e164375700a54ccc2b26ef9b774e7" gracePeriod=2 Dec 05 12:50:18 crc kubenswrapper[4784]: I1205 12:50:18.337594 4784 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/dnsmasq-dns-5d5d69c675-5xh8d" event={"ID":"a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b","Type":"ContainerDied","Data":"71e01c7a7c0b9a03eade199a6d9c092e6be85aa8153e328e22b810a9dbe93e98"} Dec 05 12:50:18 crc kubenswrapper[4784]: I1205 12:50:18.337623 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d5d69c675-5xh8d" event={"ID":"a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b","Type":"ContainerDied","Data":"6f44c365c7790b54cab29b2ae1a50e241a5fea0a555f393589b7d892c50cd939"} Dec 05 12:50:18 crc kubenswrapper[4784]: I1205 12:50:18.337634 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6f44c365c7790b54cab29b2ae1a50e241a5fea0a555f393589b7d892c50cd939" Dec 05 12:50:18 crc kubenswrapper[4784]: I1205 12:50:18.347726 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d5d69c675-5xh8d" Dec 05 12:50:18 crc kubenswrapper[4784]: I1205 12:50:18.476595 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b-openstack-edpm-ipam\") pod \"a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b\" (UID: \"a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b\") " Dec 05 12:50:18 crc kubenswrapper[4784]: I1205 12:50:18.476645 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b-dns-swift-storage-0\") pod \"a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b\" (UID: \"a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b\") " Dec 05 12:50:18 crc kubenswrapper[4784]: I1205 12:50:18.476726 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-25sw4\" (UniqueName: \"kubernetes.io/projected/a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b-kube-api-access-25sw4\") pod \"a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b\" (UID: \"a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b\") " Dec 05 12:50:18 crc kubenswrapper[4784]: I1205 12:50:18.476900 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b-dns-svc\") pod \"a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b\" (UID: \"a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b\") " Dec 05 12:50:18 crc kubenswrapper[4784]: I1205 12:50:18.476953 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b-ovsdbserver-nb\") pod \"a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b\" (UID: \"a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b\") " Dec 05 12:50:18 crc kubenswrapper[4784]: I1205 12:50:18.476991 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b-ovsdbserver-sb\") pod \"a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b\" (UID: \"a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b\") " Dec 05 12:50:18 crc kubenswrapper[4784]: I1205 12:50:18.477052 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b-config\") pod \"a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b\" (UID: \"a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b\") " Dec 05 12:50:18 crc kubenswrapper[4784]: I1205 12:50:18.485799 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/projected/a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b-kube-api-access-25sw4" (OuterVolumeSpecName: "kube-api-access-25sw4") pod "a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b" (UID: "a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b"). InnerVolumeSpecName "kube-api-access-25sw4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:50:18 crc kubenswrapper[4784]: I1205 12:50:18.544606 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b" (UID: "a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:50:18 crc kubenswrapper[4784]: I1205 12:50:18.545143 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b" (UID: "a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:50:18 crc kubenswrapper[4784]: I1205 12:50:18.545790 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b" (UID: "a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:50:18 crc kubenswrapper[4784]: I1205 12:50:18.556150 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b" (UID: "a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:50:18 crc kubenswrapper[4784]: I1205 12:50:18.561845 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b-config" (OuterVolumeSpecName: "config") pod "a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b" (UID: "a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:50:18 crc kubenswrapper[4784]: I1205 12:50:18.569803 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b" (UID: "a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 12:50:18 crc kubenswrapper[4784]: I1205 12:50:18.579257 4784 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 12:50:18 crc kubenswrapper[4784]: I1205 12:50:18.579325 4784 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 12:50:18 crc kubenswrapper[4784]: I1205 12:50:18.579345 4784 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 12:50:18 crc kubenswrapper[4784]: I1205 12:50:18.579356 4784 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b-config\") on node \"crc\" DevicePath \"\"" Dec 05 12:50:18 crc kubenswrapper[4784]: I1205 12:50:18.579390 4784 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 05 12:50:18 crc kubenswrapper[4784]: I1205 12:50:18.579403 4784 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 12:50:18 crc kubenswrapper[4784]: I1205 12:50:18.579412 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-25sw4\" (UniqueName: \"kubernetes.io/projected/a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b-kube-api-access-25sw4\") on node \"crc\" DevicePath \"\"" Dec 05 12:50:18 crc kubenswrapper[4784]: I1205 12:50:18.774765 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-z5hfj" Dec 05 12:50:18 crc kubenswrapper[4784]: I1205 12:50:18.883649 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6ff04d0-052e-46b7-aeec-4e2ea4338b11-catalog-content\") pod \"a6ff04d0-052e-46b7-aeec-4e2ea4338b11\" (UID: \"a6ff04d0-052e-46b7-aeec-4e2ea4338b11\") " Dec 05 12:50:18 crc kubenswrapper[4784]: I1205 12:50:18.883990 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qws7n\" (UniqueName: \"kubernetes.io/projected/a6ff04d0-052e-46b7-aeec-4e2ea4338b11-kube-api-access-qws7n\") pod \"a6ff04d0-052e-46b7-aeec-4e2ea4338b11\" (UID: \"a6ff04d0-052e-46b7-aeec-4e2ea4338b11\") " Dec 05 12:50:18 crc kubenswrapper[4784]: I1205 12:50:18.884030 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6ff04d0-052e-46b7-aeec-4e2ea4338b11-utilities\") pod \"a6ff04d0-052e-46b7-aeec-4e2ea4338b11\" (UID: \"a6ff04d0-052e-46b7-aeec-4e2ea4338b11\") " Dec 05 12:50:18 crc kubenswrapper[4784]: I1205 12:50:18.884726 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a6ff04d0-052e-46b7-aeec-4e2ea4338b11-utilities" (OuterVolumeSpecName: "utilities") pod "a6ff04d0-052e-46b7-aeec-4e2ea4338b11" (UID: "a6ff04d0-052e-46b7-aeec-4e2ea4338b11"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:50:18 crc kubenswrapper[4784]: I1205 12:50:18.888496 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6ff04d0-052e-46b7-aeec-4e2ea4338b11-kube-api-access-qws7n" (OuterVolumeSpecName: "kube-api-access-qws7n") pod "a6ff04d0-052e-46b7-aeec-4e2ea4338b11" (UID: "a6ff04d0-052e-46b7-aeec-4e2ea4338b11"). InnerVolumeSpecName "kube-api-access-qws7n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:50:18 crc kubenswrapper[4784]: I1205 12:50:18.987727 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qws7n\" (UniqueName: \"kubernetes.io/projected/a6ff04d0-052e-46b7-aeec-4e2ea4338b11-kube-api-access-qws7n\") on node \"crc\" DevicePath \"\"" Dec 05 12:50:18 crc kubenswrapper[4784]: I1205 12:50:18.987792 4784 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6ff04d0-052e-46b7-aeec-4e2ea4338b11-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:50:19 crc kubenswrapper[4784]: I1205 12:50:19.354462 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-z5hfj" Dec 05 12:50:19 crc kubenswrapper[4784]: I1205 12:50:19.354468 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z5hfj" event={"ID":"a6ff04d0-052e-46b7-aeec-4e2ea4338b11","Type":"ContainerDied","Data":"719ed70d1d4f48fc4d3db715a51a17e0da2e164375700a54ccc2b26ef9b774e7"} Dec 05 12:50:19 crc kubenswrapper[4784]: I1205 12:50:19.355154 4784 scope.go:117] "RemoveContainer" containerID="719ed70d1d4f48fc4d3db715a51a17e0da2e164375700a54ccc2b26ef9b774e7" Dec 05 12:50:19 crc kubenswrapper[4784]: I1205 12:50:19.354342 4784 generic.go:334] "Generic (PLEG): container finished" podID="a6ff04d0-052e-46b7-aeec-4e2ea4338b11" containerID="719ed70d1d4f48fc4d3db715a51a17e0da2e164375700a54ccc2b26ef9b774e7" exitCode=0 Dec 05 12:50:19 crc kubenswrapper[4784]: I1205 12:50:19.356389 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z5hfj" event={"ID":"a6ff04d0-052e-46b7-aeec-4e2ea4338b11","Type":"ContainerDied","Data":"e0ae4767f1c3823b062c32668a810bb51c1d4b30ce11f6c7b6585d22683515c4"} Dec 05 12:50:19 crc kubenswrapper[4784]: I1205 12:50:19.356443 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5d5d69c675-5xh8d" Dec 05 12:50:19 crc kubenswrapper[4784]: I1205 12:50:19.386460 4784 scope.go:117] "RemoveContainer" containerID="d9768f295ffe585fb0eb31c600596d90ec3e272ccf70efa5644b5d2b1af10012" Dec 05 12:50:19 crc kubenswrapper[4784]: I1205 12:50:19.400293 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d5d69c675-5xh8d"] Dec 05 12:50:19 crc kubenswrapper[4784]: I1205 12:50:19.409675 4784 scope.go:117] "RemoveContainer" containerID="f0915f7efc9fc2c78a4b94f530785be545830a65736ae8830d3506db8a02cd2f" Dec 05 12:50:19 crc kubenswrapper[4784]: I1205 12:50:19.415804 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5d5d69c675-5xh8d"] Dec 05 12:50:19 crc kubenswrapper[4784]: I1205 12:50:19.455026 4784 scope.go:117] "RemoveContainer" containerID="719ed70d1d4f48fc4d3db715a51a17e0da2e164375700a54ccc2b26ef9b774e7" Dec 05 12:50:19 crc kubenswrapper[4784]: E1205 12:50:19.455862 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"719ed70d1d4f48fc4d3db715a51a17e0da2e164375700a54ccc2b26ef9b774e7\": container with ID starting with 719ed70d1d4f48fc4d3db715a51a17e0da2e164375700a54ccc2b26ef9b774e7 not found: ID does not exist" containerID="719ed70d1d4f48fc4d3db715a51a17e0da2e164375700a54ccc2b26ef9b774e7" Dec 05 12:50:19 crc kubenswrapper[4784]: I1205 12:50:19.455915 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"719ed70d1d4f48fc4d3db715a51a17e0da2e164375700a54ccc2b26ef9b774e7"} err="failed to get container status \"719ed70d1d4f48fc4d3db715a51a17e0da2e164375700a54ccc2b26ef9b774e7\": rpc error: code = NotFound desc = could not find container \"719ed70d1d4f48fc4d3db715a51a17e0da2e164375700a54ccc2b26ef9b774e7\": container with ID starting with 719ed70d1d4f48fc4d3db715a51a17e0da2e164375700a54ccc2b26ef9b774e7 not found: ID does not exist" Dec 05 12:50:19 crc kubenswrapper[4784]: I1205 12:50:19.455951 4784 scope.go:117] "RemoveContainer" containerID="d9768f295ffe585fb0eb31c600596d90ec3e272ccf70efa5644b5d2b1af10012" Dec 05 12:50:19 crc kubenswrapper[4784]: E1205 12:50:19.456287 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d9768f295ffe585fb0eb31c600596d90ec3e272ccf70efa5644b5d2b1af10012\": container with ID starting with d9768f295ffe585fb0eb31c600596d90ec3e272ccf70efa5644b5d2b1af10012 not found: ID does not exist" containerID="d9768f295ffe585fb0eb31c600596d90ec3e272ccf70efa5644b5d2b1af10012" Dec 05 12:50:19 crc kubenswrapper[4784]: I1205 12:50:19.456325 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d9768f295ffe585fb0eb31c600596d90ec3e272ccf70efa5644b5d2b1af10012"} err="failed to get container status \"d9768f295ffe585fb0eb31c600596d90ec3e272ccf70efa5644b5d2b1af10012\": rpc error: code = NotFound desc = could not find container \"d9768f295ffe585fb0eb31c600596d90ec3e272ccf70efa5644b5d2b1af10012\": container with ID starting with d9768f295ffe585fb0eb31c600596d90ec3e272ccf70efa5644b5d2b1af10012 not found: ID does not exist" Dec 05 12:50:19 crc kubenswrapper[4784]: I1205 12:50:19.456347 4784 scope.go:117] "RemoveContainer" containerID="f0915f7efc9fc2c78a4b94f530785be545830a65736ae8830d3506db8a02cd2f" Dec 05 12:50:19 crc kubenswrapper[4784]: E1205 12:50:19.456720 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code 
= NotFound desc = could not find container \"f0915f7efc9fc2c78a4b94f530785be545830a65736ae8830d3506db8a02cd2f\": container with ID starting with f0915f7efc9fc2c78a4b94f530785be545830a65736ae8830d3506db8a02cd2f not found: ID does not exist" containerID="f0915f7efc9fc2c78a4b94f530785be545830a65736ae8830d3506db8a02cd2f" Dec 05 12:50:19 crc kubenswrapper[4784]: I1205 12:50:19.456748 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f0915f7efc9fc2c78a4b94f530785be545830a65736ae8830d3506db8a02cd2f"} err="failed to get container status \"f0915f7efc9fc2c78a4b94f530785be545830a65736ae8830d3506db8a02cd2f\": rpc error: code = NotFound desc = could not find container \"f0915f7efc9fc2c78a4b94f530785be545830a65736ae8830d3506db8a02cd2f\": container with ID starting with f0915f7efc9fc2c78a4b94f530785be545830a65736ae8830d3506db8a02cd2f not found: ID does not exist" Dec 05 12:50:19 crc kubenswrapper[4784]: I1205 12:50:19.473652 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a6ff04d0-052e-46b7-aeec-4e2ea4338b11-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a6ff04d0-052e-46b7-aeec-4e2ea4338b11" (UID: "a6ff04d0-052e-46b7-aeec-4e2ea4338b11"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:50:19 crc kubenswrapper[4784]: I1205 12:50:19.499249 4784 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6ff04d0-052e-46b7-aeec-4e2ea4338b11-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:50:19 crc kubenswrapper[4784]: I1205 12:50:19.696586 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-z5hfj"] Dec 05 12:50:19 crc kubenswrapper[4784]: I1205 12:50:19.707601 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-z5hfj"] Dec 05 12:50:21 crc kubenswrapper[4784]: I1205 12:50:21.018694 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b" path="/var/lib/kubelet/pods/a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b/volumes" Dec 05 12:50:21 crc kubenswrapper[4784]: I1205 12:50:21.021870 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a6ff04d0-052e-46b7-aeec-4e2ea4338b11" path="/var/lib/kubelet/pods/a6ff04d0-052e-46b7-aeec-4e2ea4338b11/volumes" Dec 05 12:50:24 crc kubenswrapper[4784]: I1205 12:50:24.412624 4784 generic.go:334] "Generic (PLEG): container finished" podID="94f3bf83-4b17-4dbc-aed9-b0541983c0b8" containerID="34631d0f50bdceac6bda07cedd65110cec8defe1a16533dd52823382738e2830" exitCode=0 Dec 05 12:50:24 crc kubenswrapper[4784]: I1205 12:50:24.412720 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"94f3bf83-4b17-4dbc-aed9-b0541983c0b8","Type":"ContainerDied","Data":"34631d0f50bdceac6bda07cedd65110cec8defe1a16533dd52823382738e2830"} Dec 05 12:50:24 crc kubenswrapper[4784]: I1205 12:50:24.416822 4784 generic.go:334] "Generic (PLEG): container finished" podID="1a3bb70f-0aad-4f14-809e-1f39b78c97b8" containerID="297aaad636978d492a98954373055b46465f8c8cd8ba3c551aa5dde0e35a21e9" exitCode=0 Dec 05 12:50:24 crc kubenswrapper[4784]: I1205 12:50:24.416859 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" 
event={"ID":"1a3bb70f-0aad-4f14-809e-1f39b78c97b8","Type":"ContainerDied","Data":"297aaad636978d492a98954373055b46465f8c8cd8ba3c551aa5dde0e35a21e9"} Dec 05 12:50:25 crc kubenswrapper[4784]: I1205 12:50:25.433932 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"1a3bb70f-0aad-4f14-809e-1f39b78c97b8","Type":"ContainerStarted","Data":"8726c6a3f2a98a0c3ec9cb6aec6d48f9490b3ab827660a8128adc57d7e634a4e"} Dec 05 12:50:25 crc kubenswrapper[4784]: I1205 12:50:25.434520 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 05 12:50:25 crc kubenswrapper[4784]: I1205 12:50:25.437436 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"94f3bf83-4b17-4dbc-aed9-b0541983c0b8","Type":"ContainerStarted","Data":"411e426d2e2a75aff4098e466ddc66df4b32090dbbc8fb996e1227fd454abc54"} Dec 05 12:50:25 crc kubenswrapper[4784]: I1205 12:50:25.437712 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:50:25 crc kubenswrapper[4784]: I1205 12:50:25.465009 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.464987594 podStartE2EDuration="37.464987594s" podCreationTimestamp="2025-12-05 12:49:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:50:25.461222157 +0000 UTC m=+1504.881288992" watchObservedRunningTime="2025-12-05 12:50:25.464987594 +0000 UTC m=+1504.885054409" Dec 05 12:50:25 crc kubenswrapper[4784]: I1205 12:50:25.501871 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=37.501796263 podStartE2EDuration="37.501796263s" podCreationTimestamp="2025-12-05 12:49:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 12:50:25.488684154 +0000 UTC m=+1504.908750979" watchObservedRunningTime="2025-12-05 12:50:25.501796263 +0000 UTC m=+1504.921863098" Dec 05 12:50:29 crc kubenswrapper[4784]: I1205 12:50:29.572835 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:50:29 crc kubenswrapper[4784]: I1205 12:50:29.573278 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:50:36 crc kubenswrapper[4784]: I1205 12:50:36.332471 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dmmxx"] Dec 05 12:50:36 crc kubenswrapper[4784]: E1205 12:50:36.335042 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0387d9db-dbb7-404e-991b-f67a7f02a1bf" containerName="dnsmasq-dns" Dec 05 12:50:36 crc kubenswrapper[4784]: I1205 12:50:36.335077 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="0387d9db-dbb7-404e-991b-f67a7f02a1bf" containerName="dnsmasq-dns" Dec 
05 12:50:36 crc kubenswrapper[4784]: E1205 12:50:36.335087 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6ff04d0-052e-46b7-aeec-4e2ea4338b11" containerName="extract-content" Dec 05 12:50:36 crc kubenswrapper[4784]: I1205 12:50:36.335094 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6ff04d0-052e-46b7-aeec-4e2ea4338b11" containerName="extract-content" Dec 05 12:50:36 crc kubenswrapper[4784]: E1205 12:50:36.335107 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6ff04d0-052e-46b7-aeec-4e2ea4338b11" containerName="extract-utilities" Dec 05 12:50:36 crc kubenswrapper[4784]: I1205 12:50:36.335115 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6ff04d0-052e-46b7-aeec-4e2ea4338b11" containerName="extract-utilities" Dec 05 12:50:36 crc kubenswrapper[4784]: E1205 12:50:36.335131 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b" containerName="dnsmasq-dns" Dec 05 12:50:36 crc kubenswrapper[4784]: I1205 12:50:36.335137 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b" containerName="dnsmasq-dns" Dec 05 12:50:36 crc kubenswrapper[4784]: E1205 12:50:36.335164 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b" containerName="init" Dec 05 12:50:36 crc kubenswrapper[4784]: I1205 12:50:36.335170 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b" containerName="init" Dec 05 12:50:36 crc kubenswrapper[4784]: E1205 12:50:36.335182 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0387d9db-dbb7-404e-991b-f67a7f02a1bf" containerName="init" Dec 05 12:50:36 crc kubenswrapper[4784]: I1205 12:50:36.335191 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="0387d9db-dbb7-404e-991b-f67a7f02a1bf" containerName="init" Dec 05 12:50:36 crc kubenswrapper[4784]: E1205 12:50:36.335217 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6ff04d0-052e-46b7-aeec-4e2ea4338b11" containerName="registry-server" Dec 05 12:50:36 crc kubenswrapper[4784]: I1205 12:50:36.335222 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6ff04d0-052e-46b7-aeec-4e2ea4338b11" containerName="registry-server" Dec 05 12:50:36 crc kubenswrapper[4784]: I1205 12:50:36.335468 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6ff04d0-052e-46b7-aeec-4e2ea4338b11" containerName="registry-server" Dec 05 12:50:36 crc kubenswrapper[4784]: I1205 12:50:36.335485 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="a29f9571-3ebe-4c12-b82f-29fe6d0e0c4b" containerName="dnsmasq-dns" Dec 05 12:50:36 crc kubenswrapper[4784]: I1205 12:50:36.335493 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="0387d9db-dbb7-404e-991b-f67a7f02a1bf" containerName="dnsmasq-dns" Dec 05 12:50:36 crc kubenswrapper[4784]: I1205 12:50:36.336179 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dmmxx" Dec 05 12:50:36 crc kubenswrapper[4784]: I1205 12:50:36.339071 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-lwfzg" Dec 05 12:50:36 crc kubenswrapper[4784]: I1205 12:50:36.339314 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 12:50:36 crc kubenswrapper[4784]: I1205 12:50:36.339413 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 12:50:36 crc kubenswrapper[4784]: I1205 12:50:36.339450 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 12:50:36 crc kubenswrapper[4784]: I1205 12:50:36.342362 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dmmxx"] Dec 05 12:50:36 crc kubenswrapper[4784]: I1205 12:50:36.350040 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ced78d37-0ef6-4a75-903d-7db8946f38f4-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-dmmxx\" (UID: \"ced78d37-0ef6-4a75-903d-7db8946f38f4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dmmxx" Dec 05 12:50:36 crc kubenswrapper[4784]: I1205 12:50:36.350264 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ced78d37-0ef6-4a75-903d-7db8946f38f4-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-dmmxx\" (UID: \"ced78d37-0ef6-4a75-903d-7db8946f38f4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dmmxx" Dec 05 12:50:36 crc kubenswrapper[4784]: I1205 12:50:36.350371 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ced78d37-0ef6-4a75-903d-7db8946f38f4-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-dmmxx\" (UID: \"ced78d37-0ef6-4a75-903d-7db8946f38f4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dmmxx" Dec 05 12:50:36 crc kubenswrapper[4784]: I1205 12:50:36.350451 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-npvvn\" (UniqueName: \"kubernetes.io/projected/ced78d37-0ef6-4a75-903d-7db8946f38f4-kube-api-access-npvvn\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-dmmxx\" (UID: \"ced78d37-0ef6-4a75-903d-7db8946f38f4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dmmxx" Dec 05 12:50:36 crc kubenswrapper[4784]: I1205 12:50:36.451838 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ced78d37-0ef6-4a75-903d-7db8946f38f4-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-dmmxx\" (UID: \"ced78d37-0ef6-4a75-903d-7db8946f38f4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dmmxx" Dec 05 12:50:36 crc kubenswrapper[4784]: I1205 12:50:36.451921 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ced78d37-0ef6-4a75-903d-7db8946f38f4-repo-setup-combined-ca-bundle\") pod 
\"repo-setup-edpm-deployment-openstack-edpm-ipam-dmmxx\" (UID: \"ced78d37-0ef6-4a75-903d-7db8946f38f4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dmmxx" Dec 05 12:50:36 crc kubenswrapper[4784]: I1205 12:50:36.451997 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ced78d37-0ef6-4a75-903d-7db8946f38f4-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-dmmxx\" (UID: \"ced78d37-0ef6-4a75-903d-7db8946f38f4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dmmxx" Dec 05 12:50:36 crc kubenswrapper[4784]: I1205 12:50:36.452042 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-npvvn\" (UniqueName: \"kubernetes.io/projected/ced78d37-0ef6-4a75-903d-7db8946f38f4-kube-api-access-npvvn\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-dmmxx\" (UID: \"ced78d37-0ef6-4a75-903d-7db8946f38f4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dmmxx" Dec 05 12:50:36 crc kubenswrapper[4784]: I1205 12:50:36.457931 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ced78d37-0ef6-4a75-903d-7db8946f38f4-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-dmmxx\" (UID: \"ced78d37-0ef6-4a75-903d-7db8946f38f4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dmmxx" Dec 05 12:50:36 crc kubenswrapper[4784]: I1205 12:50:36.457943 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ced78d37-0ef6-4a75-903d-7db8946f38f4-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-dmmxx\" (UID: \"ced78d37-0ef6-4a75-903d-7db8946f38f4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dmmxx" Dec 05 12:50:36 crc kubenswrapper[4784]: I1205 12:50:36.457975 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ced78d37-0ef6-4a75-903d-7db8946f38f4-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-dmmxx\" (UID: \"ced78d37-0ef6-4a75-903d-7db8946f38f4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dmmxx" Dec 05 12:50:36 crc kubenswrapper[4784]: I1205 12:50:36.473813 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-npvvn\" (UniqueName: \"kubernetes.io/projected/ced78d37-0ef6-4a75-903d-7db8946f38f4-kube-api-access-npvvn\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-dmmxx\" (UID: \"ced78d37-0ef6-4a75-903d-7db8946f38f4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dmmxx" Dec 05 12:50:36 crc kubenswrapper[4784]: I1205 12:50:36.656265 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dmmxx" Dec 05 12:50:37 crc kubenswrapper[4784]: I1205 12:50:37.336793 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dmmxx"] Dec 05 12:50:37 crc kubenswrapper[4784]: I1205 12:50:37.574452 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dmmxx" event={"ID":"ced78d37-0ef6-4a75-903d-7db8946f38f4","Type":"ContainerStarted","Data":"0422cdff2aa2b6d0885724f8d870f9a4b7d3e5a846731741ad387c48a509251f"} Dec 05 12:50:38 crc kubenswrapper[4784]: I1205 12:50:38.426673 4784 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="1a3bb70f-0aad-4f14-809e-1f39b78c97b8" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.229:5671: connect: connection refused" Dec 05 12:50:38 crc kubenswrapper[4784]: I1205 12:50:38.635162 4784 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="94f3bf83-4b17-4dbc-aed9-b0541983c0b8" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.230:5671: connect: connection refused" Dec 05 12:50:48 crc kubenswrapper[4784]: I1205 12:50:48.426398 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 05 12:50:48 crc kubenswrapper[4784]: I1205 12:50:48.637414 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 05 12:50:48 crc kubenswrapper[4784]: I1205 12:50:48.703717 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dmmxx" event={"ID":"ced78d37-0ef6-4a75-903d-7db8946f38f4","Type":"ContainerStarted","Data":"cfa4f5d5c16e7e06d826b6bd849e364d59095ab4d7084be5f5dd7ac3c9b1028e"} Dec 05 12:50:48 crc kubenswrapper[4784]: I1205 12:50:48.740251 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dmmxx" podStartSLOduration=2.569543623 podStartE2EDuration="12.740229803s" podCreationTimestamp="2025-12-05 12:50:36 +0000 UTC" firstStartedPulling="2025-12-05 12:50:37.342102146 +0000 UTC m=+1516.762168961" lastFinishedPulling="2025-12-05 12:50:47.512788286 +0000 UTC m=+1526.932855141" observedRunningTime="2025-12-05 12:50:48.731570454 +0000 UTC m=+1528.151637269" watchObservedRunningTime="2025-12-05 12:50:48.740229803 +0000 UTC m=+1528.160296618" Dec 05 12:50:59 crc kubenswrapper[4784]: I1205 12:50:59.573054 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:50:59 crc kubenswrapper[4784]: I1205 12:50:59.573737 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:50:59 crc kubenswrapper[4784]: I1205 12:50:59.573794 4784 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" Dec 05 
12:50:59 crc kubenswrapper[4784]: I1205 12:50:59.574592 4784 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1e04a48f7470e663550729d5a6189f2705f6151f92d045a29c6589f7e1ee84a9"} pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 12:50:59 crc kubenswrapper[4784]: I1205 12:50:59.574648 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" containerID="cri-o://1e04a48f7470e663550729d5a6189f2705f6151f92d045a29c6589f7e1ee84a9" gracePeriod=600 Dec 05 12:50:59 crc kubenswrapper[4784]: I1205 12:50:59.828363 4784 generic.go:334] "Generic (PLEG): container finished" podID="be412f31-7a36-4811-8914-be8cdc987d08" containerID="1e04a48f7470e663550729d5a6189f2705f6151f92d045a29c6589f7e1ee84a9" exitCode=0 Dec 05 12:50:59 crc kubenswrapper[4784]: I1205 12:50:59.828410 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerDied","Data":"1e04a48f7470e663550729d5a6189f2705f6151f92d045a29c6589f7e1ee84a9"} Dec 05 12:50:59 crc kubenswrapper[4784]: I1205 12:50:59.828450 4784 scope.go:117] "RemoveContainer" containerID="babf0042920beaff6a1a6221d95064bf622413e3938841d914cae1798c6b7709" Dec 05 12:51:00 crc kubenswrapper[4784]: E1205 12:51:00.217011 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 12:51:00 crc kubenswrapper[4784]: I1205 12:51:00.850405 4784 scope.go:117] "RemoveContainer" containerID="1e04a48f7470e663550729d5a6189f2705f6151f92d045a29c6589f7e1ee84a9" Dec 05 12:51:00 crc kubenswrapper[4784]: E1205 12:51:00.851580 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 12:51:02 crc kubenswrapper[4784]: E1205 12:51:02.429021 4784 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podced78d37_0ef6_4a75_903d_7db8946f38f4.slice/crio-cfa4f5d5c16e7e06d826b6bd849e364d59095ab4d7084be5f5dd7ac3c9b1028e.scope\": RecentStats: unable to find data in memory cache]" Dec 05 12:51:02 crc kubenswrapper[4784]: I1205 12:51:02.873081 4784 generic.go:334] "Generic (PLEG): container finished" podID="ced78d37-0ef6-4a75-903d-7db8946f38f4" containerID="cfa4f5d5c16e7e06d826b6bd849e364d59095ab4d7084be5f5dd7ac3c9b1028e" exitCode=0 Dec 05 12:51:02 crc kubenswrapper[4784]: I1205 12:51:02.873557 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dmmxx" event={"ID":"ced78d37-0ef6-4a75-903d-7db8946f38f4","Type":"ContainerDied","Data":"cfa4f5d5c16e7e06d826b6bd849e364d59095ab4d7084be5f5dd7ac3c9b1028e"} Dec 05 12:51:04 crc kubenswrapper[4784]: I1205 12:51:04.436488 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dmmxx" Dec 05 12:51:04 crc kubenswrapper[4784]: I1205 12:51:04.576872 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-npvvn\" (UniqueName: \"kubernetes.io/projected/ced78d37-0ef6-4a75-903d-7db8946f38f4-kube-api-access-npvvn\") pod \"ced78d37-0ef6-4a75-903d-7db8946f38f4\" (UID: \"ced78d37-0ef6-4a75-903d-7db8946f38f4\") " Dec 05 12:51:04 crc kubenswrapper[4784]: I1205 12:51:04.576993 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ced78d37-0ef6-4a75-903d-7db8946f38f4-ssh-key\") pod \"ced78d37-0ef6-4a75-903d-7db8946f38f4\" (UID: \"ced78d37-0ef6-4a75-903d-7db8946f38f4\") " Dec 05 12:51:04 crc kubenswrapper[4784]: I1205 12:51:04.577038 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ced78d37-0ef6-4a75-903d-7db8946f38f4-inventory\") pod \"ced78d37-0ef6-4a75-903d-7db8946f38f4\" (UID: \"ced78d37-0ef6-4a75-903d-7db8946f38f4\") " Dec 05 12:51:04 crc kubenswrapper[4784]: I1205 12:51:04.577164 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ced78d37-0ef6-4a75-903d-7db8946f38f4-repo-setup-combined-ca-bundle\") pod \"ced78d37-0ef6-4a75-903d-7db8946f38f4\" (UID: \"ced78d37-0ef6-4a75-903d-7db8946f38f4\") " Dec 05 12:51:04 crc kubenswrapper[4784]: I1205 12:51:04.582210 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ced78d37-0ef6-4a75-903d-7db8946f38f4-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "ced78d37-0ef6-4a75-903d-7db8946f38f4" (UID: "ced78d37-0ef6-4a75-903d-7db8946f38f4"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:51:04 crc kubenswrapper[4784]: I1205 12:51:04.583582 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ced78d37-0ef6-4a75-903d-7db8946f38f4-kube-api-access-npvvn" (OuterVolumeSpecName: "kube-api-access-npvvn") pod "ced78d37-0ef6-4a75-903d-7db8946f38f4" (UID: "ced78d37-0ef6-4a75-903d-7db8946f38f4"). InnerVolumeSpecName "kube-api-access-npvvn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:51:04 crc kubenswrapper[4784]: I1205 12:51:04.606349 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ced78d37-0ef6-4a75-903d-7db8946f38f4-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ced78d37-0ef6-4a75-903d-7db8946f38f4" (UID: "ced78d37-0ef6-4a75-903d-7db8946f38f4"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:51:04 crc kubenswrapper[4784]: I1205 12:51:04.607588 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ced78d37-0ef6-4a75-903d-7db8946f38f4-inventory" (OuterVolumeSpecName: "inventory") pod "ced78d37-0ef6-4a75-903d-7db8946f38f4" (UID: "ced78d37-0ef6-4a75-903d-7db8946f38f4"). 
InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:51:04 crc kubenswrapper[4784]: I1205 12:51:04.679753 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-npvvn\" (UniqueName: \"kubernetes.io/projected/ced78d37-0ef6-4a75-903d-7db8946f38f4-kube-api-access-npvvn\") on node \"crc\" DevicePath \"\"" Dec 05 12:51:04 crc kubenswrapper[4784]: I1205 12:51:04.679797 4784 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ced78d37-0ef6-4a75-903d-7db8946f38f4-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 12:51:04 crc kubenswrapper[4784]: I1205 12:51:04.679812 4784 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ced78d37-0ef6-4a75-903d-7db8946f38f4-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 12:51:04 crc kubenswrapper[4784]: I1205 12:51:04.679826 4784 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ced78d37-0ef6-4a75-903d-7db8946f38f4-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:51:04 crc kubenswrapper[4784]: I1205 12:51:04.894436 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dmmxx" event={"ID":"ced78d37-0ef6-4a75-903d-7db8946f38f4","Type":"ContainerDied","Data":"0422cdff2aa2b6d0885724f8d870f9a4b7d3e5a846731741ad387c48a509251f"} Dec 05 12:51:04 crc kubenswrapper[4784]: I1205 12:51:04.894477 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0422cdff2aa2b6d0885724f8d870f9a4b7d3e5a846731741ad387c48a509251f" Dec 05 12:51:04 crc kubenswrapper[4784]: I1205 12:51:04.894779 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dmmxx" Dec 05 12:51:04 crc kubenswrapper[4784]: I1205 12:51:04.977448 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-djr9k"] Dec 05 12:51:04 crc kubenswrapper[4784]: E1205 12:51:04.978125 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ced78d37-0ef6-4a75-903d-7db8946f38f4" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 05 12:51:04 crc kubenswrapper[4784]: I1205 12:51:04.978155 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="ced78d37-0ef6-4a75-903d-7db8946f38f4" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 05 12:51:04 crc kubenswrapper[4784]: I1205 12:51:04.978858 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="ced78d37-0ef6-4a75-903d-7db8946f38f4" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 05 12:51:04 crc kubenswrapper[4784]: I1205 12:51:04.980062 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-djr9k" Dec 05 12:51:04 crc kubenswrapper[4784]: I1205 12:51:04.982051 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 12:51:04 crc kubenswrapper[4784]: I1205 12:51:04.983024 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 12:51:04 crc kubenswrapper[4784]: I1205 12:51:04.983032 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 12:51:04 crc kubenswrapper[4784]: I1205 12:51:04.985170 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-lwfzg" Dec 05 12:51:05 crc kubenswrapper[4784]: I1205 12:51:05.039489 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-djr9k"] Dec 05 12:51:05 crc kubenswrapper[4784]: I1205 12:51:05.086729 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e26d5696-c749-46ed-9f75-f07d0c46c076-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-djr9k\" (UID: \"e26d5696-c749-46ed-9f75-f07d0c46c076\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-djr9k" Dec 05 12:51:05 crc kubenswrapper[4784]: I1205 12:51:05.086773 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e26d5696-c749-46ed-9f75-f07d0c46c076-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-djr9k\" (UID: \"e26d5696-c749-46ed-9f75-f07d0c46c076\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-djr9k" Dec 05 12:51:05 crc kubenswrapper[4784]: I1205 12:51:05.086892 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kpklf\" (UniqueName: \"kubernetes.io/projected/e26d5696-c749-46ed-9f75-f07d0c46c076-kube-api-access-kpklf\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-djr9k\" (UID: \"e26d5696-c749-46ed-9f75-f07d0c46c076\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-djr9k" Dec 05 12:51:05 crc kubenswrapper[4784]: I1205 12:51:05.188902 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e26d5696-c749-46ed-9f75-f07d0c46c076-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-djr9k\" (UID: \"e26d5696-c749-46ed-9f75-f07d0c46c076\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-djr9k" Dec 05 12:51:05 crc kubenswrapper[4784]: I1205 12:51:05.189233 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e26d5696-c749-46ed-9f75-f07d0c46c076-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-djr9k\" (UID: \"e26d5696-c749-46ed-9f75-f07d0c46c076\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-djr9k" Dec 05 12:51:05 crc kubenswrapper[4784]: I1205 12:51:05.189442 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kpklf\" (UniqueName: \"kubernetes.io/projected/e26d5696-c749-46ed-9f75-f07d0c46c076-kube-api-access-kpklf\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-djr9k\" (UID: \"e26d5696-c749-46ed-9f75-f07d0c46c076\") " 
pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-djr9k" Dec 05 12:51:05 crc kubenswrapper[4784]: I1205 12:51:05.193036 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e26d5696-c749-46ed-9f75-f07d0c46c076-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-djr9k\" (UID: \"e26d5696-c749-46ed-9f75-f07d0c46c076\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-djr9k" Dec 05 12:51:05 crc kubenswrapper[4784]: I1205 12:51:05.193719 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e26d5696-c749-46ed-9f75-f07d0c46c076-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-djr9k\" (UID: \"e26d5696-c749-46ed-9f75-f07d0c46c076\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-djr9k" Dec 05 12:51:05 crc kubenswrapper[4784]: I1205 12:51:05.211065 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kpklf\" (UniqueName: \"kubernetes.io/projected/e26d5696-c749-46ed-9f75-f07d0c46c076-kube-api-access-kpklf\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-djr9k\" (UID: \"e26d5696-c749-46ed-9f75-f07d0c46c076\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-djr9k" Dec 05 12:51:05 crc kubenswrapper[4784]: I1205 12:51:05.308892 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-djr9k" Dec 05 12:51:05 crc kubenswrapper[4784]: I1205 12:51:05.889812 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-djr9k"] Dec 05 12:51:05 crc kubenswrapper[4784]: I1205 12:51:05.906421 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-djr9k" event={"ID":"e26d5696-c749-46ed-9f75-f07d0c46c076","Type":"ContainerStarted","Data":"8674da61defd69589c1343bd572078bc744a93fe449e37da14fbcd21ddbebd96"} Dec 05 12:51:06 crc kubenswrapper[4784]: I1205 12:51:06.919114 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-djr9k" event={"ID":"e26d5696-c749-46ed-9f75-f07d0c46c076","Type":"ContainerStarted","Data":"3b6baa3de8f61f159448e87144a216548c019f53c289f1665af5a150099924dd"} Dec 05 12:51:06 crc kubenswrapper[4784]: I1205 12:51:06.941032 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-djr9k" podStartSLOduration=2.252638114 podStartE2EDuration="2.941010993s" podCreationTimestamp="2025-12-05 12:51:04 +0000 UTC" firstStartedPulling="2025-12-05 12:51:05.897018989 +0000 UTC m=+1545.317085804" lastFinishedPulling="2025-12-05 12:51:06.585391868 +0000 UTC m=+1546.005458683" observedRunningTime="2025-12-05 12:51:06.934422498 +0000 UTC m=+1546.354489313" watchObservedRunningTime="2025-12-05 12:51:06.941010993 +0000 UTC m=+1546.361077808" Dec 05 12:51:09 crc kubenswrapper[4784]: I1205 12:51:09.961893 4784 generic.go:334] "Generic (PLEG): container finished" podID="e26d5696-c749-46ed-9f75-f07d0c46c076" containerID="3b6baa3de8f61f159448e87144a216548c019f53c289f1665af5a150099924dd" exitCode=0 Dec 05 12:51:09 crc kubenswrapper[4784]: I1205 12:51:09.962521 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-djr9k" 
event={"ID":"e26d5696-c749-46ed-9f75-f07d0c46c076","Type":"ContainerDied","Data":"3b6baa3de8f61f159448e87144a216548c019f53c289f1665af5a150099924dd"} Dec 05 12:51:11 crc kubenswrapper[4784]: I1205 12:51:11.448456 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-djr9k" Dec 05 12:51:11 crc kubenswrapper[4784]: I1205 12:51:11.616980 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e26d5696-c749-46ed-9f75-f07d0c46c076-ssh-key\") pod \"e26d5696-c749-46ed-9f75-f07d0c46c076\" (UID: \"e26d5696-c749-46ed-9f75-f07d0c46c076\") " Dec 05 12:51:11 crc kubenswrapper[4784]: I1205 12:51:11.617315 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e26d5696-c749-46ed-9f75-f07d0c46c076-inventory\") pod \"e26d5696-c749-46ed-9f75-f07d0c46c076\" (UID: \"e26d5696-c749-46ed-9f75-f07d0c46c076\") " Dec 05 12:51:11 crc kubenswrapper[4784]: I1205 12:51:11.617449 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kpklf\" (UniqueName: \"kubernetes.io/projected/e26d5696-c749-46ed-9f75-f07d0c46c076-kube-api-access-kpklf\") pod \"e26d5696-c749-46ed-9f75-f07d0c46c076\" (UID: \"e26d5696-c749-46ed-9f75-f07d0c46c076\") " Dec 05 12:51:11 crc kubenswrapper[4784]: I1205 12:51:11.622938 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e26d5696-c749-46ed-9f75-f07d0c46c076-kube-api-access-kpklf" (OuterVolumeSpecName: "kube-api-access-kpklf") pod "e26d5696-c749-46ed-9f75-f07d0c46c076" (UID: "e26d5696-c749-46ed-9f75-f07d0c46c076"). InnerVolumeSpecName "kube-api-access-kpklf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:51:11 crc kubenswrapper[4784]: I1205 12:51:11.648421 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e26d5696-c749-46ed-9f75-f07d0c46c076-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "e26d5696-c749-46ed-9f75-f07d0c46c076" (UID: "e26d5696-c749-46ed-9f75-f07d0c46c076"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:51:11 crc kubenswrapper[4784]: I1205 12:51:11.654829 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e26d5696-c749-46ed-9f75-f07d0c46c076-inventory" (OuterVolumeSpecName: "inventory") pod "e26d5696-c749-46ed-9f75-f07d0c46c076" (UID: "e26d5696-c749-46ed-9f75-f07d0c46c076"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:51:11 crc kubenswrapper[4784]: I1205 12:51:11.720140 4784 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e26d5696-c749-46ed-9f75-f07d0c46c076-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 12:51:11 crc kubenswrapper[4784]: I1205 12:51:11.720218 4784 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e26d5696-c749-46ed-9f75-f07d0c46c076-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 12:51:11 crc kubenswrapper[4784]: I1205 12:51:11.720236 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kpklf\" (UniqueName: \"kubernetes.io/projected/e26d5696-c749-46ed-9f75-f07d0c46c076-kube-api-access-kpklf\") on node \"crc\" DevicePath \"\"" Dec 05 12:51:11 crc kubenswrapper[4784]: I1205 12:51:11.984831 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-djr9k" event={"ID":"e26d5696-c749-46ed-9f75-f07d0c46c076","Type":"ContainerDied","Data":"8674da61defd69589c1343bd572078bc744a93fe449e37da14fbcd21ddbebd96"} Dec 05 12:51:11 crc kubenswrapper[4784]: I1205 12:51:11.985123 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8674da61defd69589c1343bd572078bc744a93fe449e37da14fbcd21ddbebd96" Dec 05 12:51:11 crc kubenswrapper[4784]: I1205 12:51:11.984960 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-djr9k" Dec 05 12:51:12 crc kubenswrapper[4784]: I1205 12:51:12.084301 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kphkn"] Dec 05 12:51:12 crc kubenswrapper[4784]: E1205 12:51:12.084836 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e26d5696-c749-46ed-9f75-f07d0c46c076" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 05 12:51:12 crc kubenswrapper[4784]: I1205 12:51:12.084862 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="e26d5696-c749-46ed-9f75-f07d0c46c076" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 05 12:51:12 crc kubenswrapper[4784]: I1205 12:51:12.085123 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="e26d5696-c749-46ed-9f75-f07d0c46c076" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 05 12:51:12 crc kubenswrapper[4784]: I1205 12:51:12.086045 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kphkn" Dec 05 12:51:12 crc kubenswrapper[4784]: I1205 12:51:12.088181 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 12:51:12 crc kubenswrapper[4784]: I1205 12:51:12.088223 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-lwfzg" Dec 05 12:51:12 crc kubenswrapper[4784]: I1205 12:51:12.088505 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 12:51:12 crc kubenswrapper[4784]: I1205 12:51:12.088673 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 12:51:12 crc kubenswrapper[4784]: I1205 12:51:12.100076 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kphkn"] Dec 05 12:51:12 crc kubenswrapper[4784]: I1205 12:51:12.231010 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/345a4940-4998-4cbc-bd5e-89bea1eec60b-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kphkn\" (UID: \"345a4940-4998-4cbc-bd5e-89bea1eec60b\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kphkn" Dec 05 12:51:12 crc kubenswrapper[4784]: I1205 12:51:12.231071 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/345a4940-4998-4cbc-bd5e-89bea1eec60b-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kphkn\" (UID: \"345a4940-4998-4cbc-bd5e-89bea1eec60b\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kphkn" Dec 05 12:51:12 crc kubenswrapper[4784]: I1205 12:51:12.231431 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5p2qx\" (UniqueName: \"kubernetes.io/projected/345a4940-4998-4cbc-bd5e-89bea1eec60b-kube-api-access-5p2qx\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kphkn\" (UID: \"345a4940-4998-4cbc-bd5e-89bea1eec60b\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kphkn" Dec 05 12:51:12 crc kubenswrapper[4784]: I1205 12:51:12.231515 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/345a4940-4998-4cbc-bd5e-89bea1eec60b-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kphkn\" (UID: \"345a4940-4998-4cbc-bd5e-89bea1eec60b\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kphkn" Dec 05 12:51:12 crc kubenswrapper[4784]: I1205 12:51:12.333157 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/345a4940-4998-4cbc-bd5e-89bea1eec60b-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kphkn\" (UID: \"345a4940-4998-4cbc-bd5e-89bea1eec60b\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kphkn" Dec 05 12:51:12 crc kubenswrapper[4784]: I1205 12:51:12.333382 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5p2qx\" (UniqueName: \"kubernetes.io/projected/345a4940-4998-4cbc-bd5e-89bea1eec60b-kube-api-access-5p2qx\") pod 
\"bootstrap-edpm-deployment-openstack-edpm-ipam-kphkn\" (UID: \"345a4940-4998-4cbc-bd5e-89bea1eec60b\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kphkn" Dec 05 12:51:12 crc kubenswrapper[4784]: I1205 12:51:12.333438 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/345a4940-4998-4cbc-bd5e-89bea1eec60b-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kphkn\" (UID: \"345a4940-4998-4cbc-bd5e-89bea1eec60b\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kphkn" Dec 05 12:51:12 crc kubenswrapper[4784]: I1205 12:51:12.333500 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/345a4940-4998-4cbc-bd5e-89bea1eec60b-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kphkn\" (UID: \"345a4940-4998-4cbc-bd5e-89bea1eec60b\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kphkn" Dec 05 12:51:12 crc kubenswrapper[4784]: I1205 12:51:12.337723 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/345a4940-4998-4cbc-bd5e-89bea1eec60b-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kphkn\" (UID: \"345a4940-4998-4cbc-bd5e-89bea1eec60b\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kphkn" Dec 05 12:51:12 crc kubenswrapper[4784]: I1205 12:51:12.344988 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/345a4940-4998-4cbc-bd5e-89bea1eec60b-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kphkn\" (UID: \"345a4940-4998-4cbc-bd5e-89bea1eec60b\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kphkn" Dec 05 12:51:12 crc kubenswrapper[4784]: I1205 12:51:12.346596 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/345a4940-4998-4cbc-bd5e-89bea1eec60b-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kphkn\" (UID: \"345a4940-4998-4cbc-bd5e-89bea1eec60b\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kphkn" Dec 05 12:51:12 crc kubenswrapper[4784]: I1205 12:51:12.352550 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5p2qx\" (UniqueName: \"kubernetes.io/projected/345a4940-4998-4cbc-bd5e-89bea1eec60b-kube-api-access-5p2qx\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kphkn\" (UID: \"345a4940-4998-4cbc-bd5e-89bea1eec60b\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kphkn" Dec 05 12:51:12 crc kubenswrapper[4784]: I1205 12:51:12.431852 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kphkn" Dec 05 12:51:13 crc kubenswrapper[4784]: I1205 12:51:13.118843 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kphkn"] Dec 05 12:51:13 crc kubenswrapper[4784]: I1205 12:51:13.999076 4784 scope.go:117] "RemoveContainer" containerID="1e04a48f7470e663550729d5a6189f2705f6151f92d045a29c6589f7e1ee84a9" Dec 05 12:51:13 crc kubenswrapper[4784]: E1205 12:51:13.999854 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 12:51:14 crc kubenswrapper[4784]: I1205 12:51:14.008840 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kphkn" event={"ID":"345a4940-4998-4cbc-bd5e-89bea1eec60b","Type":"ContainerStarted","Data":"7c81a56648d0be2142c1f4a07647f8a1ba226b778b1d12c724c1e7699763ef26"} Dec 05 12:51:14 crc kubenswrapper[4784]: I1205 12:51:14.009030 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kphkn" event={"ID":"345a4940-4998-4cbc-bd5e-89bea1eec60b","Type":"ContainerStarted","Data":"2ed4fe97ffd773bcf8c2e69149666a8b2e6dcab7904393206733977ad2981fa7"} Dec 05 12:51:14 crc kubenswrapper[4784]: I1205 12:51:14.026950 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kphkn" podStartSLOduration=1.629022509 podStartE2EDuration="2.026928744s" podCreationTimestamp="2025-12-05 12:51:12 +0000 UTC" firstStartedPulling="2025-12-05 12:51:13.12361012 +0000 UTC m=+1552.543676935" lastFinishedPulling="2025-12-05 12:51:13.521516355 +0000 UTC m=+1552.941583170" observedRunningTime="2025-12-05 12:51:14.024933762 +0000 UTC m=+1553.445000587" watchObservedRunningTime="2025-12-05 12:51:14.026928744 +0000 UTC m=+1553.446995569" Dec 05 12:51:25 crc kubenswrapper[4784]: I1205 12:51:25.999061 4784 scope.go:117] "RemoveContainer" containerID="1e04a48f7470e663550729d5a6189f2705f6151f92d045a29c6589f7e1ee84a9" Dec 05 12:51:26 crc kubenswrapper[4784]: E1205 12:51:25.999949 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 12:51:35 crc kubenswrapper[4784]: I1205 12:51:35.865940 4784 scope.go:117] "RemoveContainer" containerID="587ae8b4ecbbf8c3a53dc103d722bdef7bb03047d9e1a3c4441922544763350f" Dec 05 12:51:35 crc kubenswrapper[4784]: I1205 12:51:35.896344 4784 scope.go:117] "RemoveContainer" containerID="b646fcb2b2c249a594e14c73e5dc74284417d3cad63669cec82c1e0dcca98dad" Dec 05 12:51:35 crc kubenswrapper[4784]: I1205 12:51:35.956980 4784 scope.go:117] "RemoveContainer" containerID="a1d669560b9ce310a44a6087662efd06d842726a425f5a3285b0aedb4cd31110" Dec 05 12:51:37 crc kubenswrapper[4784]: I1205 12:51:37.999358 4784 
scope.go:117] "RemoveContainer" containerID="1e04a48f7470e663550729d5a6189f2705f6151f92d045a29c6589f7e1ee84a9" Dec 05 12:51:38 crc kubenswrapper[4784]: E1205 12:51:37.999873 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 12:51:52 crc kubenswrapper[4784]: I1205 12:51:52.999044 4784 scope.go:117] "RemoveContainer" containerID="1e04a48f7470e663550729d5a6189f2705f6151f92d045a29c6589f7e1ee84a9" Dec 05 12:51:53 crc kubenswrapper[4784]: E1205 12:51:52.999812 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 12:52:07 crc kubenswrapper[4784]: I1205 12:52:07.000746 4784 scope.go:117] "RemoveContainer" containerID="1e04a48f7470e663550729d5a6189f2705f6151f92d045a29c6589f7e1ee84a9" Dec 05 12:52:07 crc kubenswrapper[4784]: E1205 12:52:07.001543 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 12:52:21 crc kubenswrapper[4784]: I1205 12:52:21.006099 4784 scope.go:117] "RemoveContainer" containerID="1e04a48f7470e663550729d5a6189f2705f6151f92d045a29c6589f7e1ee84a9" Dec 05 12:52:21 crc kubenswrapper[4784]: E1205 12:52:21.006953 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 12:52:36 crc kubenswrapper[4784]: I1205 12:52:35.999606 4784 scope.go:117] "RemoveContainer" containerID="1e04a48f7470e663550729d5a6189f2705f6151f92d045a29c6589f7e1ee84a9" Dec 05 12:52:36 crc kubenswrapper[4784]: E1205 12:52:36.001364 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 12:52:36 crc kubenswrapper[4784]: I1205 12:52:36.077178 4784 scope.go:117] "RemoveContainer" containerID="939b90befea8539870cbe36b6f5192fd9a0cac640ba20efb942874be42be9891" Dec 05 12:52:36 crc kubenswrapper[4784]: I1205 12:52:36.110539 4784 scope.go:117] 
"RemoveContainer" containerID="427abff3eac7473e838fa85f2097041270a6b80a6e6f065dfda211890bb77d19" Dec 05 12:52:36 crc kubenswrapper[4784]: I1205 12:52:36.139538 4784 scope.go:117] "RemoveContainer" containerID="77e2a7c42808e9e0e4c00d4af24858424deab4379fd3e42a566f5ef44e9abf18" Dec 05 12:52:36 crc kubenswrapper[4784]: I1205 12:52:36.180906 4784 scope.go:117] "RemoveContainer" containerID="773546a469ba89b933adcb75ad82aff9b851eb2d42eb822c7c78e5fe1ae30e3e" Dec 05 12:52:48 crc kubenswrapper[4784]: I1205 12:52:48.000807 4784 scope.go:117] "RemoveContainer" containerID="1e04a48f7470e663550729d5a6189f2705f6151f92d045a29c6589f7e1ee84a9" Dec 05 12:52:48 crc kubenswrapper[4784]: E1205 12:52:48.002008 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 12:53:03 crc kubenswrapper[4784]: I1205 12:53:02.999798 4784 scope.go:117] "RemoveContainer" containerID="1e04a48f7470e663550729d5a6189f2705f6151f92d045a29c6589f7e1ee84a9" Dec 05 12:53:03 crc kubenswrapper[4784]: E1205 12:53:03.000415 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 12:53:16 crc kubenswrapper[4784]: I1205 12:53:16.999245 4784 scope.go:117] "RemoveContainer" containerID="1e04a48f7470e663550729d5a6189f2705f6151f92d045a29c6589f7e1ee84a9" Dec 05 12:53:17 crc kubenswrapper[4784]: E1205 12:53:17.000164 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 12:53:30 crc kubenswrapper[4784]: I1205 12:53:30.000059 4784 scope.go:117] "RemoveContainer" containerID="1e04a48f7470e663550729d5a6189f2705f6151f92d045a29c6589f7e1ee84a9" Dec 05 12:53:30 crc kubenswrapper[4784]: E1205 12:53:30.000814 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 12:53:36 crc kubenswrapper[4784]: I1205 12:53:36.448171 4784 scope.go:117] "RemoveContainer" containerID="f6eec2743071ba7ac1b1d7ab9a27d25e0964ab1a8dfbb2d1f6f71d92a0402bbd" Dec 05 12:53:36 crc kubenswrapper[4784]: I1205 12:53:36.510805 4784 scope.go:117] "RemoveContainer" containerID="cd1af3671b6a033ab9b96d423079150fc10f5b063cd3b5401e93f8b094f1e25b" Dec 05 12:53:44 crc kubenswrapper[4784]: I1205 
12:53:44.999317 4784 scope.go:117] "RemoveContainer" containerID="1e04a48f7470e663550729d5a6189f2705f6151f92d045a29c6589f7e1ee84a9" Dec 05 12:53:45 crc kubenswrapper[4784]: E1205 12:53:44.999951 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 12:54:00 crc kubenswrapper[4784]: I1205 12:53:59.999598 4784 scope.go:117] "RemoveContainer" containerID="1e04a48f7470e663550729d5a6189f2705f6151f92d045a29c6589f7e1ee84a9" Dec 05 12:54:00 crc kubenswrapper[4784]: E1205 12:54:00.000741 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 12:54:12 crc kubenswrapper[4784]: I1205 12:54:12.999105 4784 scope.go:117] "RemoveContainer" containerID="1e04a48f7470e663550729d5a6189f2705f6151f92d045a29c6589f7e1ee84a9" Dec 05 12:54:13 crc kubenswrapper[4784]: E1205 12:54:12.999883 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 12:54:24 crc kubenswrapper[4784]: I1205 12:54:24.000282 4784 scope.go:117] "RemoveContainer" containerID="1e04a48f7470e663550729d5a6189f2705f6151f92d045a29c6589f7e1ee84a9" Dec 05 12:54:24 crc kubenswrapper[4784]: E1205 12:54:24.001179 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 12:54:34 crc kubenswrapper[4784]: I1205 12:54:34.999215 4784 scope.go:117] "RemoveContainer" containerID="1e04a48f7470e663550729d5a6189f2705f6151f92d045a29c6589f7e1ee84a9" Dec 05 12:54:35 crc kubenswrapper[4784]: E1205 12:54:35.000124 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 12:54:36 crc kubenswrapper[4784]: I1205 12:54:36.578066 4784 scope.go:117] "RemoveContainer" containerID="6934a0f2f73c3ac881bf5c89e07018e75a7fd91d74c96648f990ee733eb5c4ac" Dec 05 12:54:36 crc kubenswrapper[4784]: I1205 12:54:36.600724 
4784 scope.go:117] "RemoveContainer" containerID="69ffb335fd935f734240e738cf53383a98bc03a86bdf32f2c155c97e7eb42895" Dec 05 12:54:36 crc kubenswrapper[4784]: I1205 12:54:36.625331 4784 scope.go:117] "RemoveContainer" containerID="02db3d322fbd098914b1cccdb042ddf34a90653fb6fcc25242c4df49b82e93bb" Dec 05 12:54:36 crc kubenswrapper[4784]: I1205 12:54:36.646234 4784 scope.go:117] "RemoveContainer" containerID="305bfc8117568f6360542668643027fb4ba3e66c87cfc9049987d3447fd32bf5" Dec 05 12:54:36 crc kubenswrapper[4784]: I1205 12:54:36.663262 4784 scope.go:117] "RemoveContainer" containerID="f08cd08f84cfaa38311c120526b32a39d012228084df3f6d36389eb1ec800f46" Dec 05 12:54:41 crc kubenswrapper[4784]: I1205 12:54:41.065051 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-b8km7"] Dec 05 12:54:41 crc kubenswrapper[4784]: I1205 12:54:41.075852 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-x99kz"] Dec 05 12:54:41 crc kubenswrapper[4784]: I1205 12:54:41.084502 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-db-create-xwm49"] Dec 05 12:54:41 crc kubenswrapper[4784]: I1205 12:54:41.093811 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-88ae-account-create-update-ht5wv"] Dec 05 12:54:41 crc kubenswrapper[4784]: I1205 12:54:41.102648 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-9959-account-create-update-5k2wn"] Dec 05 12:54:41 crc kubenswrapper[4784]: I1205 12:54:41.113385 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-01e4-account-create-update-x2qm9"] Dec 05 12:54:41 crc kubenswrapper[4784]: I1205 12:54:41.122283 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-88ae-account-create-update-ht5wv"] Dec 05 12:54:41 crc kubenswrapper[4784]: I1205 12:54:41.131003 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-01e4-account-create-update-x2qm9"] Dec 05 12:54:41 crc kubenswrapper[4784]: I1205 12:54:41.139899 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-b8km7"] Dec 05 12:54:41 crc kubenswrapper[4784]: I1205 12:54:41.148661 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-db-create-xwm49"] Dec 05 12:54:41 crc kubenswrapper[4784]: I1205 12:54:41.158711 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-x99kz"] Dec 05 12:54:41 crc kubenswrapper[4784]: I1205 12:54:41.167671 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-9959-account-create-update-5k2wn"] Dec 05 12:54:43 crc kubenswrapper[4784]: I1205 12:54:43.013934 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="97d32e44-8c63-443a-b1b9-cc553a42c7dd" path="/var/lib/kubelet/pods/97d32e44-8c63-443a-b1b9-cc553a42c7dd/volumes" Dec 05 12:54:43 crc kubenswrapper[4784]: I1205 12:54:43.015593 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a6099b41-be7d-4790-9f8c-3581d99ce48e" path="/var/lib/kubelet/pods/a6099b41-be7d-4790-9f8c-3581d99ce48e/volumes" Dec 05 12:54:43 crc kubenswrapper[4784]: I1205 12:54:43.017663 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b06854d3-4d35-46bb-b79f-ef5482b07eba" path="/var/lib/kubelet/pods/b06854d3-4d35-46bb-b79f-ef5482b07eba/volumes" Dec 05 12:54:43 crc kubenswrapper[4784]: I1205 12:54:43.019259 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="e0c87cde-536a-436e-a823-50af03676501" path="/var/lib/kubelet/pods/e0c87cde-536a-436e-a823-50af03676501/volumes" Dec 05 12:54:43 crc kubenswrapper[4784]: I1205 12:54:43.021740 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f3f23d2c-8577-46a7-90f4-37e12278b111" path="/var/lib/kubelet/pods/f3f23d2c-8577-46a7-90f4-37e12278b111/volumes" Dec 05 12:54:43 crc kubenswrapper[4784]: I1205 12:54:43.023254 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff6438fd-45a7-4d2f-ae2c-a27a1a13ae83" path="/var/lib/kubelet/pods/ff6438fd-45a7-4d2f-ae2c-a27a1a13ae83/volumes" Dec 05 12:54:46 crc kubenswrapper[4784]: I1205 12:54:46.998949 4784 scope.go:117] "RemoveContainer" containerID="1e04a48f7470e663550729d5a6189f2705f6151f92d045a29c6589f7e1ee84a9" Dec 05 12:54:47 crc kubenswrapper[4784]: E1205 12:54:46.999789 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 12:54:59 crc kubenswrapper[4784]: I1205 12:54:59.998546 4784 scope.go:117] "RemoveContainer" containerID="1e04a48f7470e663550729d5a6189f2705f6151f92d045a29c6589f7e1ee84a9" Dec 05 12:55:00 crc kubenswrapper[4784]: E1205 12:54:59.999333 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 12:55:01 crc kubenswrapper[4784]: I1205 12:55:01.639222 4784 generic.go:334] "Generic (PLEG): container finished" podID="345a4940-4998-4cbc-bd5e-89bea1eec60b" containerID="7c81a56648d0be2142c1f4a07647f8a1ba226b778b1d12c724c1e7699763ef26" exitCode=0 Dec 05 12:55:01 crc kubenswrapper[4784]: I1205 12:55:01.639311 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kphkn" event={"ID":"345a4940-4998-4cbc-bd5e-89bea1eec60b","Type":"ContainerDied","Data":"7c81a56648d0be2142c1f4a07647f8a1ba226b778b1d12c724c1e7699763ef26"} Dec 05 12:55:03 crc kubenswrapper[4784]: I1205 12:55:03.099286 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kphkn" Dec 05 12:55:03 crc kubenswrapper[4784]: I1205 12:55:03.223853 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/345a4940-4998-4cbc-bd5e-89bea1eec60b-inventory\") pod \"345a4940-4998-4cbc-bd5e-89bea1eec60b\" (UID: \"345a4940-4998-4cbc-bd5e-89bea1eec60b\") " Dec 05 12:55:03 crc kubenswrapper[4784]: I1205 12:55:03.223904 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/345a4940-4998-4cbc-bd5e-89bea1eec60b-ssh-key\") pod \"345a4940-4998-4cbc-bd5e-89bea1eec60b\" (UID: \"345a4940-4998-4cbc-bd5e-89bea1eec60b\") " Dec 05 12:55:03 crc kubenswrapper[4784]: I1205 12:55:03.223927 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/345a4940-4998-4cbc-bd5e-89bea1eec60b-bootstrap-combined-ca-bundle\") pod \"345a4940-4998-4cbc-bd5e-89bea1eec60b\" (UID: \"345a4940-4998-4cbc-bd5e-89bea1eec60b\") " Dec 05 12:55:03 crc kubenswrapper[4784]: I1205 12:55:03.224023 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5p2qx\" (UniqueName: \"kubernetes.io/projected/345a4940-4998-4cbc-bd5e-89bea1eec60b-kube-api-access-5p2qx\") pod \"345a4940-4998-4cbc-bd5e-89bea1eec60b\" (UID: \"345a4940-4998-4cbc-bd5e-89bea1eec60b\") " Dec 05 12:55:03 crc kubenswrapper[4784]: I1205 12:55:03.229948 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/345a4940-4998-4cbc-bd5e-89bea1eec60b-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "345a4940-4998-4cbc-bd5e-89bea1eec60b" (UID: "345a4940-4998-4cbc-bd5e-89bea1eec60b"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:55:03 crc kubenswrapper[4784]: I1205 12:55:03.230020 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/345a4940-4998-4cbc-bd5e-89bea1eec60b-kube-api-access-5p2qx" (OuterVolumeSpecName: "kube-api-access-5p2qx") pod "345a4940-4998-4cbc-bd5e-89bea1eec60b" (UID: "345a4940-4998-4cbc-bd5e-89bea1eec60b"). InnerVolumeSpecName "kube-api-access-5p2qx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:55:03 crc kubenswrapper[4784]: I1205 12:55:03.252124 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/345a4940-4998-4cbc-bd5e-89bea1eec60b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "345a4940-4998-4cbc-bd5e-89bea1eec60b" (UID: "345a4940-4998-4cbc-bd5e-89bea1eec60b"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:55:03 crc kubenswrapper[4784]: I1205 12:55:03.260585 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/345a4940-4998-4cbc-bd5e-89bea1eec60b-inventory" (OuterVolumeSpecName: "inventory") pod "345a4940-4998-4cbc-bd5e-89bea1eec60b" (UID: "345a4940-4998-4cbc-bd5e-89bea1eec60b"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:55:03 crc kubenswrapper[4784]: I1205 12:55:03.325961 4784 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/345a4940-4998-4cbc-bd5e-89bea1eec60b-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 12:55:03 crc kubenswrapper[4784]: I1205 12:55:03.325992 4784 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/345a4940-4998-4cbc-bd5e-89bea1eec60b-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 12:55:03 crc kubenswrapper[4784]: I1205 12:55:03.326002 4784 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/345a4940-4998-4cbc-bd5e-89bea1eec60b-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 12:55:03 crc kubenswrapper[4784]: I1205 12:55:03.326014 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5p2qx\" (UniqueName: \"kubernetes.io/projected/345a4940-4998-4cbc-bd5e-89bea1eec60b-kube-api-access-5p2qx\") on node \"crc\" DevicePath \"\"" Dec 05 12:55:03 crc kubenswrapper[4784]: I1205 12:55:03.661989 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kphkn" event={"ID":"345a4940-4998-4cbc-bd5e-89bea1eec60b","Type":"ContainerDied","Data":"2ed4fe97ffd773bcf8c2e69149666a8b2e6dcab7904393206733977ad2981fa7"} Dec 05 12:55:03 crc kubenswrapper[4784]: I1205 12:55:03.662272 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2ed4fe97ffd773bcf8c2e69149666a8b2e6dcab7904393206733977ad2981fa7" Dec 05 12:55:03 crc kubenswrapper[4784]: I1205 12:55:03.662339 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kphkn" Dec 05 12:55:03 crc kubenswrapper[4784]: I1205 12:55:03.767204 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tx7xw"] Dec 05 12:55:03 crc kubenswrapper[4784]: E1205 12:55:03.767871 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="345a4940-4998-4cbc-bd5e-89bea1eec60b" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 05 12:55:03 crc kubenswrapper[4784]: I1205 12:55:03.767899 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="345a4940-4998-4cbc-bd5e-89bea1eec60b" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 05 12:55:03 crc kubenswrapper[4784]: I1205 12:55:03.768265 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="345a4940-4998-4cbc-bd5e-89bea1eec60b" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 05 12:55:03 crc kubenswrapper[4784]: I1205 12:55:03.769412 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tx7xw" Dec 05 12:55:03 crc kubenswrapper[4784]: I1205 12:55:03.772758 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 12:55:03 crc kubenswrapper[4784]: I1205 12:55:03.773579 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 12:55:03 crc kubenswrapper[4784]: I1205 12:55:03.773964 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 12:55:03 crc kubenswrapper[4784]: I1205 12:55:03.774140 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-lwfzg" Dec 05 12:55:03 crc kubenswrapper[4784]: I1205 12:55:03.784345 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tx7xw"] Dec 05 12:55:03 crc kubenswrapper[4784]: I1205 12:55:03.836105 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a134d50c-87cd-4225-b873-1c6b1d2a0151-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tx7xw\" (UID: \"a134d50c-87cd-4225-b873-1c6b1d2a0151\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tx7xw" Dec 05 12:55:03 crc kubenswrapper[4784]: I1205 12:55:03.836612 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a134d50c-87cd-4225-b873-1c6b1d2a0151-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tx7xw\" (UID: \"a134d50c-87cd-4225-b873-1c6b1d2a0151\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tx7xw" Dec 05 12:55:03 crc kubenswrapper[4784]: I1205 12:55:03.836906 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-499rb\" (UniqueName: \"kubernetes.io/projected/a134d50c-87cd-4225-b873-1c6b1d2a0151-kube-api-access-499rb\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tx7xw\" (UID: \"a134d50c-87cd-4225-b873-1c6b1d2a0151\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tx7xw" Dec 05 12:55:03 crc kubenswrapper[4784]: I1205 12:55:03.938320 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-499rb\" (UniqueName: \"kubernetes.io/projected/a134d50c-87cd-4225-b873-1c6b1d2a0151-kube-api-access-499rb\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tx7xw\" (UID: \"a134d50c-87cd-4225-b873-1c6b1d2a0151\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tx7xw" Dec 05 12:55:03 crc kubenswrapper[4784]: I1205 12:55:03.938488 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a134d50c-87cd-4225-b873-1c6b1d2a0151-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tx7xw\" (UID: \"a134d50c-87cd-4225-b873-1c6b1d2a0151\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tx7xw" Dec 05 12:55:03 crc kubenswrapper[4784]: I1205 12:55:03.938574 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a134d50c-87cd-4225-b873-1c6b1d2a0151-inventory\") pod 
\"download-cache-edpm-deployment-openstack-edpm-ipam-tx7xw\" (UID: \"a134d50c-87cd-4225-b873-1c6b1d2a0151\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tx7xw" Dec 05 12:55:03 crc kubenswrapper[4784]: I1205 12:55:03.943649 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a134d50c-87cd-4225-b873-1c6b1d2a0151-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tx7xw\" (UID: \"a134d50c-87cd-4225-b873-1c6b1d2a0151\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tx7xw" Dec 05 12:55:03 crc kubenswrapper[4784]: I1205 12:55:03.944643 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a134d50c-87cd-4225-b873-1c6b1d2a0151-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tx7xw\" (UID: \"a134d50c-87cd-4225-b873-1c6b1d2a0151\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tx7xw" Dec 05 12:55:03 crc kubenswrapper[4784]: I1205 12:55:03.962120 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-499rb\" (UniqueName: \"kubernetes.io/projected/a134d50c-87cd-4225-b873-1c6b1d2a0151-kube-api-access-499rb\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tx7xw\" (UID: \"a134d50c-87cd-4225-b873-1c6b1d2a0151\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tx7xw" Dec 05 12:55:04 crc kubenswrapper[4784]: I1205 12:55:04.089747 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tx7xw" Dec 05 12:55:04 crc kubenswrapper[4784]: I1205 12:55:04.639326 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tx7xw"] Dec 05 12:55:04 crc kubenswrapper[4784]: I1205 12:55:04.658669 4784 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 12:55:04 crc kubenswrapper[4784]: I1205 12:55:04.672010 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tx7xw" event={"ID":"a134d50c-87cd-4225-b873-1c6b1d2a0151","Type":"ContainerStarted","Data":"966d370474c4686ced4147a1e8b1fbae9a6ccd48c232f4edff4a515495eff6a0"} Dec 05 12:55:05 crc kubenswrapper[4784]: I1205 12:55:05.045870 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-84546"] Dec 05 12:55:05 crc kubenswrapper[4784]: I1205 12:55:05.060673 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-dnwsj"] Dec 05 12:55:05 crc kubenswrapper[4784]: I1205 12:55:05.071697 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-7f4f-account-create-update-7mrkg"] Dec 05 12:55:05 crc kubenswrapper[4784]: I1205 12:55:05.083140 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-657f-account-create-update-w28tq"] Dec 05 12:55:05 crc kubenswrapper[4784]: I1205 12:55:05.093683 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-84546"] Dec 05 12:55:05 crc kubenswrapper[4784]: I1205 12:55:05.103234 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-7f4f-account-create-update-7mrkg"] Dec 05 12:55:05 crc kubenswrapper[4784]: I1205 12:55:05.112741 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-dnwsj"] 
Dec 05 12:55:05 crc kubenswrapper[4784]: I1205 12:55:05.121993 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-657f-account-create-update-w28tq"] Dec 05 12:55:05 crc kubenswrapper[4784]: I1205 12:55:05.684538 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tx7xw" event={"ID":"a134d50c-87cd-4225-b873-1c6b1d2a0151","Type":"ContainerStarted","Data":"812d8ee055c89845af30c513e5d6c8416eda18fce96fa2f55fddae0b9e1ed641"} Dec 05 12:55:05 crc kubenswrapper[4784]: I1205 12:55:05.709131 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tx7xw" podStartSLOduration=2.293872957 podStartE2EDuration="2.709111637s" podCreationTimestamp="2025-12-05 12:55:03 +0000 UTC" firstStartedPulling="2025-12-05 12:55:04.658398598 +0000 UTC m=+1784.078465413" lastFinishedPulling="2025-12-05 12:55:05.073637278 +0000 UTC m=+1784.493704093" observedRunningTime="2025-12-05 12:55:05.704223815 +0000 UTC m=+1785.124290630" watchObservedRunningTime="2025-12-05 12:55:05.709111637 +0000 UTC m=+1785.129178452" Dec 05 12:55:07 crc kubenswrapper[4784]: I1205 12:55:07.016073 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1985ab81-9425-43c9-9395-2efcdd2b336d" path="/var/lib/kubelet/pods/1985ab81-9425-43c9-9395-2efcdd2b336d/volumes" Dec 05 12:55:07 crc kubenswrapper[4784]: I1205 12:55:07.017288 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7fb37670-5391-491a-9141-a113fdbd8650" path="/var/lib/kubelet/pods/7fb37670-5391-491a-9141-a113fdbd8650/volumes" Dec 05 12:55:07 crc kubenswrapper[4784]: I1205 12:55:07.018097 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df5a977b-4e06-442e-90cc-ccb94cf0929b" path="/var/lib/kubelet/pods/df5a977b-4e06-442e-90cc-ccb94cf0929b/volumes" Dec 05 12:55:07 crc kubenswrapper[4784]: I1205 12:55:07.018939 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc12a9ba-c259-43c6-ac41-c3c0bc2ac019" path="/var/lib/kubelet/pods/fc12a9ba-c259-43c6-ac41-c3c0bc2ac019/volumes" Dec 05 12:55:11 crc kubenswrapper[4784]: I1205 12:55:11.036375 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-xmxxf"] Dec 05 12:55:11 crc kubenswrapper[4784]: I1205 12:55:11.047770 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-5940-account-create-update-9bnkr"] Dec 05 12:55:11 crc kubenswrapper[4784]: I1205 12:55:11.055551 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-xmxxf"] Dec 05 12:55:11 crc kubenswrapper[4784]: I1205 12:55:11.062544 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-bc4gb"] Dec 05 12:55:11 crc kubenswrapper[4784]: I1205 12:55:11.069353 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-5940-account-create-update-9bnkr"] Dec 05 12:55:11 crc kubenswrapper[4784]: I1205 12:55:11.076616 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-bc4gb"] Dec 05 12:55:12 crc kubenswrapper[4784]: I1205 12:55:12.034520 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-b15b-account-create-update-9zgxk"] Dec 05 12:55:12 crc kubenswrapper[4784]: I1205 12:55:12.046398 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-b15b-account-create-update-9zgxk"] Dec 05 12:55:13 crc kubenswrapper[4784]: I1205 
12:55:13.015418 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22340d7d-4dcb-4d3e-b7b8-9e388587b3aa" path="/var/lib/kubelet/pods/22340d7d-4dcb-4d3e-b7b8-9e388587b3aa/volumes" Dec 05 12:55:13 crc kubenswrapper[4784]: I1205 12:55:13.016392 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39de58d0-af8e-4869-8f7c-34c94a412b1a" path="/var/lib/kubelet/pods/39de58d0-af8e-4869-8f7c-34c94a412b1a/volumes" Dec 05 12:55:13 crc kubenswrapper[4784]: I1205 12:55:13.017475 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9ff23b53-bbeb-467c-9d74-713682fa23a4" path="/var/lib/kubelet/pods/9ff23b53-bbeb-467c-9d74-713682fa23a4/volumes" Dec 05 12:55:13 crc kubenswrapper[4784]: I1205 12:55:13.019132 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c6b3acb2-88da-4560-9648-ee6d3a5d6d60" path="/var/lib/kubelet/pods/c6b3acb2-88da-4560-9648-ee6d3a5d6d60/volumes" Dec 05 12:55:15 crc kubenswrapper[4784]: I1205 12:55:14.999645 4784 scope.go:117] "RemoveContainer" containerID="1e04a48f7470e663550729d5a6189f2705f6151f92d045a29c6589f7e1ee84a9" Dec 05 12:55:15 crc kubenswrapper[4784]: E1205 12:55:15.000445 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 12:55:26 crc kubenswrapper[4784]: I1205 12:55:26.998877 4784 scope.go:117] "RemoveContainer" containerID="1e04a48f7470e663550729d5a6189f2705f6151f92d045a29c6589f7e1ee84a9" Dec 05 12:55:27 crc kubenswrapper[4784]: E1205 12:55:26.999792 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 12:55:28 crc kubenswrapper[4784]: I1205 12:55:28.037001 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-db-sync-8rhvb"] Dec 05 12:55:28 crc kubenswrapper[4784]: I1205 12:55:28.053358 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-gkqx2"] Dec 05 12:55:28 crc kubenswrapper[4784]: I1205 12:55:28.069648 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-gkqx2"] Dec 05 12:55:28 crc kubenswrapper[4784]: I1205 12:55:28.080935 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-db-sync-8rhvb"] Dec 05 12:55:29 crc kubenswrapper[4784]: I1205 12:55:29.029037 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44208e80-4774-4f47-93a8-6e23b7402949" path="/var/lib/kubelet/pods/44208e80-4774-4f47-93a8-6e23b7402949/volumes" Dec 05 12:55:29 crc kubenswrapper[4784]: I1205 12:55:29.030784 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ffc17758-4586-495d-842c-68c47898b1c1" path="/var/lib/kubelet/pods/ffc17758-4586-495d-842c-68c47898b1c1/volumes" Dec 05 12:55:36 crc kubenswrapper[4784]: I1205 12:55:36.747821 4784 scope.go:117] "RemoveContainer" 
containerID="366ac7830f6ed494c3ba9244c6f9804d3ca8a735a17623f227023de7fa89790c" Dec 05 12:55:36 crc kubenswrapper[4784]: I1205 12:55:36.770399 4784 scope.go:117] "RemoveContainer" containerID="9252f9844134d6ffdf9ed49ab4709bca71ea8a3a9595b8dc724bf08dd0f48a79" Dec 05 12:55:36 crc kubenswrapper[4784]: I1205 12:55:36.829099 4784 scope.go:117] "RemoveContainer" containerID="7f8ceb121e8b5f4a850cd12af9c3e210962322abb391083b3945f3c71f2721b7" Dec 05 12:55:36 crc kubenswrapper[4784]: I1205 12:55:36.888276 4784 scope.go:117] "RemoveContainer" containerID="594e5d2bca0815304adce02e83f13aa60ca80f4f338b64667fbe90e9279dd576" Dec 05 12:55:36 crc kubenswrapper[4784]: I1205 12:55:36.938864 4784 scope.go:117] "RemoveContainer" containerID="dffa1009a27a348d4f855d55ba2b22811736eb53fab9b217ed86b0afe6194869" Dec 05 12:55:36 crc kubenswrapper[4784]: I1205 12:55:36.992493 4784 scope.go:117] "RemoveContainer" containerID="cfc9985e60fdb8705e5e2a3e48dde0147801fc5f9f5fd0efc8f20e441fe4218a" Dec 05 12:55:37 crc kubenswrapper[4784]: I1205 12:55:37.060302 4784 scope.go:117] "RemoveContainer" containerID="a4d9a80bb0266730a2fc5857ab6ad8e4a0e60c4c5da7d2a71152769cd188fdd2" Dec 05 12:55:37 crc kubenswrapper[4784]: I1205 12:55:37.098378 4784 scope.go:117] "RemoveContainer" containerID="5d722700524a5a2acca8a637f339e3c6cbecc372730f62cb7ef7dc163c7fd4ee" Dec 05 12:55:37 crc kubenswrapper[4784]: I1205 12:55:37.126368 4784 scope.go:117] "RemoveContainer" containerID="10ca6a2bd934927607a83a8a991544af85cc05b07f5a305e7c9955b56c97fa3a" Dec 05 12:55:37 crc kubenswrapper[4784]: I1205 12:55:37.168233 4784 scope.go:117] "RemoveContainer" containerID="9ba8edb6af990a5c3afa3fba997d46f949d07f6c3f4b12dd5335de31d74b31ef" Dec 05 12:55:37 crc kubenswrapper[4784]: I1205 12:55:37.200828 4784 scope.go:117] "RemoveContainer" containerID="12601a7ddeed319eda128ab845fbde09254b66bb0dc49de473fdfa5dd463f26c" Dec 05 12:55:37 crc kubenswrapper[4784]: I1205 12:55:37.241500 4784 scope.go:117] "RemoveContainer" containerID="887fa74fe35e37d939f6e4d6202ed121b49fcf8e3d853e51cd05511c108f7e56" Dec 05 12:55:37 crc kubenswrapper[4784]: I1205 12:55:37.274538 4784 scope.go:117] "RemoveContainer" containerID="6e2a893e3e5a3dbadc736398e703e136874ad4643634f2fe8c722f57069c60a7" Dec 05 12:55:37 crc kubenswrapper[4784]: I1205 12:55:37.307505 4784 scope.go:117] "RemoveContainer" containerID="60b3c15bb993555975059ae32e06a6d5526bdaa264e716bdd2713eab3b46a248" Dec 05 12:55:37 crc kubenswrapper[4784]: I1205 12:55:37.327751 4784 scope.go:117] "RemoveContainer" containerID="792a414ae5a7e7d46b0e071f1b1e4927fef66fd095c62783b2c690e04e47fe1c" Dec 05 12:55:37 crc kubenswrapper[4784]: I1205 12:55:37.348665 4784 scope.go:117] "RemoveContainer" containerID="55dc3dd3218c3888d7da7e7118ddd8a32bf8c9cf47f8e3166dd34b285f30eb28" Dec 05 12:55:41 crc kubenswrapper[4784]: I1205 12:55:41.276010 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-6qwmp"] Dec 05 12:55:41 crc kubenswrapper[4784]: I1205 12:55:41.280392 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6qwmp" Dec 05 12:55:41 crc kubenswrapper[4784]: I1205 12:55:41.302037 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6qwmp"] Dec 05 12:55:41 crc kubenswrapper[4784]: I1205 12:55:41.448726 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-99jlc\" (UniqueName: \"kubernetes.io/projected/14480f5b-23df-48ea-9774-daf857c91130-kube-api-access-99jlc\") pod \"redhat-operators-6qwmp\" (UID: \"14480f5b-23df-48ea-9774-daf857c91130\") " pod="openshift-marketplace/redhat-operators-6qwmp" Dec 05 12:55:41 crc kubenswrapper[4784]: I1205 12:55:41.448952 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14480f5b-23df-48ea-9774-daf857c91130-utilities\") pod \"redhat-operators-6qwmp\" (UID: \"14480f5b-23df-48ea-9774-daf857c91130\") " pod="openshift-marketplace/redhat-operators-6qwmp" Dec 05 12:55:41 crc kubenswrapper[4784]: I1205 12:55:41.449198 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14480f5b-23df-48ea-9774-daf857c91130-catalog-content\") pod \"redhat-operators-6qwmp\" (UID: \"14480f5b-23df-48ea-9774-daf857c91130\") " pod="openshift-marketplace/redhat-operators-6qwmp" Dec 05 12:55:41 crc kubenswrapper[4784]: I1205 12:55:41.551296 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-99jlc\" (UniqueName: \"kubernetes.io/projected/14480f5b-23df-48ea-9774-daf857c91130-kube-api-access-99jlc\") pod \"redhat-operators-6qwmp\" (UID: \"14480f5b-23df-48ea-9774-daf857c91130\") " pod="openshift-marketplace/redhat-operators-6qwmp" Dec 05 12:55:41 crc kubenswrapper[4784]: I1205 12:55:41.551404 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14480f5b-23df-48ea-9774-daf857c91130-utilities\") pod \"redhat-operators-6qwmp\" (UID: \"14480f5b-23df-48ea-9774-daf857c91130\") " pod="openshift-marketplace/redhat-operators-6qwmp" Dec 05 12:55:41 crc kubenswrapper[4784]: I1205 12:55:41.551460 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14480f5b-23df-48ea-9774-daf857c91130-catalog-content\") pod \"redhat-operators-6qwmp\" (UID: \"14480f5b-23df-48ea-9774-daf857c91130\") " pod="openshift-marketplace/redhat-operators-6qwmp" Dec 05 12:55:41 crc kubenswrapper[4784]: I1205 12:55:41.551992 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14480f5b-23df-48ea-9774-daf857c91130-catalog-content\") pod \"redhat-operators-6qwmp\" (UID: \"14480f5b-23df-48ea-9774-daf857c91130\") " pod="openshift-marketplace/redhat-operators-6qwmp" Dec 05 12:55:41 crc kubenswrapper[4784]: I1205 12:55:41.552156 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14480f5b-23df-48ea-9774-daf857c91130-utilities\") pod \"redhat-operators-6qwmp\" (UID: \"14480f5b-23df-48ea-9774-daf857c91130\") " pod="openshift-marketplace/redhat-operators-6qwmp" Dec 05 12:55:41 crc kubenswrapper[4784]: I1205 12:55:41.574002 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-99jlc\" (UniqueName: \"kubernetes.io/projected/14480f5b-23df-48ea-9774-daf857c91130-kube-api-access-99jlc\") pod \"redhat-operators-6qwmp\" (UID: \"14480f5b-23df-48ea-9774-daf857c91130\") " pod="openshift-marketplace/redhat-operators-6qwmp" Dec 05 12:55:41 crc kubenswrapper[4784]: I1205 12:55:41.611730 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6qwmp" Dec 05 12:55:41 crc kubenswrapper[4784]: I1205 12:55:41.999223 4784 scope.go:117] "RemoveContainer" containerID="1e04a48f7470e663550729d5a6189f2705f6151f92d045a29c6589f7e1ee84a9" Dec 05 12:55:41 crc kubenswrapper[4784]: E1205 12:55:41.999755 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 12:55:42 crc kubenswrapper[4784]: I1205 12:55:42.089654 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6qwmp"] Dec 05 12:55:42 crc kubenswrapper[4784]: I1205 12:55:42.115725 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6qwmp" event={"ID":"14480f5b-23df-48ea-9774-daf857c91130","Type":"ContainerStarted","Data":"f5700785f99eb8c106f56c2fd86d9ea55f2e0973e02b6ed3d6f2450d83e05d36"} Dec 05 12:55:43 crc kubenswrapper[4784]: I1205 12:55:43.128543 4784 generic.go:334] "Generic (PLEG): container finished" podID="14480f5b-23df-48ea-9774-daf857c91130" containerID="307087878dc4bca33ed0cb028154561143fd10676a17a3d0d13da5015a7e82e7" exitCode=0 Dec 05 12:55:43 crc kubenswrapper[4784]: I1205 12:55:43.128631 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6qwmp" event={"ID":"14480f5b-23df-48ea-9774-daf857c91130","Type":"ContainerDied","Data":"307087878dc4bca33ed0cb028154561143fd10676a17a3d0d13da5015a7e82e7"} Dec 05 12:55:45 crc kubenswrapper[4784]: I1205 12:55:45.156943 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6qwmp" event={"ID":"14480f5b-23df-48ea-9774-daf857c91130","Type":"ContainerStarted","Data":"e9c03e678f37ea77b45a5d9b69ad3737682c998e1b7044dac6ce02eb363b1693"} Dec 05 12:55:49 crc kubenswrapper[4784]: I1205 12:55:49.199705 4784 generic.go:334] "Generic (PLEG): container finished" podID="14480f5b-23df-48ea-9774-daf857c91130" containerID="e9c03e678f37ea77b45a5d9b69ad3737682c998e1b7044dac6ce02eb363b1693" exitCode=0 Dec 05 12:55:49 crc kubenswrapper[4784]: I1205 12:55:49.199802 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6qwmp" event={"ID":"14480f5b-23df-48ea-9774-daf857c91130","Type":"ContainerDied","Data":"e9c03e678f37ea77b45a5d9b69ad3737682c998e1b7044dac6ce02eb363b1693"} Dec 05 12:55:51 crc kubenswrapper[4784]: I1205 12:55:51.218576 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6qwmp" event={"ID":"14480f5b-23df-48ea-9774-daf857c91130","Type":"ContainerStarted","Data":"73ce6154f7d7c2337647ae5145b45b8b68501fd68d3db97752366b0aae698168"} Dec 05 12:55:51 crc kubenswrapper[4784]: I1205 12:55:51.245814 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/redhat-operators-6qwmp" podStartSLOduration=3.11376345 podStartE2EDuration="10.245796466s" podCreationTimestamp="2025-12-05 12:55:41 +0000 UTC" firstStartedPulling="2025-12-05 12:55:43.130887821 +0000 UTC m=+1822.550954676" lastFinishedPulling="2025-12-05 12:55:50.262920877 +0000 UTC m=+1829.682987692" observedRunningTime="2025-12-05 12:55:51.236096034 +0000 UTC m=+1830.656162859" watchObservedRunningTime="2025-12-05 12:55:51.245796466 +0000 UTC m=+1830.665863281" Dec 05 12:55:51 crc kubenswrapper[4784]: I1205 12:55:51.612923 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-6qwmp" Dec 05 12:55:51 crc kubenswrapper[4784]: I1205 12:55:51.612994 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-6qwmp" Dec 05 12:55:52 crc kubenswrapper[4784]: I1205 12:55:52.665908 4784 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-6qwmp" podUID="14480f5b-23df-48ea-9774-daf857c91130" containerName="registry-server" probeResult="failure" output=< Dec 05 12:55:52 crc kubenswrapper[4784]: timeout: failed to connect service ":50051" within 1s Dec 05 12:55:52 crc kubenswrapper[4784]: > Dec 05 12:55:54 crc kubenswrapper[4784]: I1205 12:55:54.999252 4784 scope.go:117] "RemoveContainer" containerID="1e04a48f7470e663550729d5a6189f2705f6151f92d045a29c6589f7e1ee84a9" Dec 05 12:55:55 crc kubenswrapper[4784]: E1205 12:55:54.999787 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 12:56:01 crc kubenswrapper[4784]: I1205 12:56:01.683683 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-6qwmp" Dec 05 12:56:01 crc kubenswrapper[4784]: I1205 12:56:01.751762 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-6qwmp" Dec 05 12:56:01 crc kubenswrapper[4784]: I1205 12:56:01.935347 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6qwmp"] Dec 05 12:56:03 crc kubenswrapper[4784]: I1205 12:56:03.345134 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-6qwmp" podUID="14480f5b-23df-48ea-9774-daf857c91130" containerName="registry-server" containerID="cri-o://73ce6154f7d7c2337647ae5145b45b8b68501fd68d3db97752366b0aae698168" gracePeriod=2 Dec 05 12:56:03 crc kubenswrapper[4784]: I1205 12:56:03.845885 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6qwmp" Dec 05 12:56:04 crc kubenswrapper[4784]: I1205 12:56:04.006124 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14480f5b-23df-48ea-9774-daf857c91130-utilities\") pod \"14480f5b-23df-48ea-9774-daf857c91130\" (UID: \"14480f5b-23df-48ea-9774-daf857c91130\") " Dec 05 12:56:04 crc kubenswrapper[4784]: I1205 12:56:04.006170 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-99jlc\" (UniqueName: \"kubernetes.io/projected/14480f5b-23df-48ea-9774-daf857c91130-kube-api-access-99jlc\") pod \"14480f5b-23df-48ea-9774-daf857c91130\" (UID: \"14480f5b-23df-48ea-9774-daf857c91130\") " Dec 05 12:56:04 crc kubenswrapper[4784]: I1205 12:56:04.006260 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14480f5b-23df-48ea-9774-daf857c91130-catalog-content\") pod \"14480f5b-23df-48ea-9774-daf857c91130\" (UID: \"14480f5b-23df-48ea-9774-daf857c91130\") " Dec 05 12:56:04 crc kubenswrapper[4784]: I1205 12:56:04.007636 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/14480f5b-23df-48ea-9774-daf857c91130-utilities" (OuterVolumeSpecName: "utilities") pod "14480f5b-23df-48ea-9774-daf857c91130" (UID: "14480f5b-23df-48ea-9774-daf857c91130"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:56:04 crc kubenswrapper[4784]: I1205 12:56:04.012355 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14480f5b-23df-48ea-9774-daf857c91130-kube-api-access-99jlc" (OuterVolumeSpecName: "kube-api-access-99jlc") pod "14480f5b-23df-48ea-9774-daf857c91130" (UID: "14480f5b-23df-48ea-9774-daf857c91130"). InnerVolumeSpecName "kube-api-access-99jlc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:56:04 crc kubenswrapper[4784]: I1205 12:56:04.109297 4784 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14480f5b-23df-48ea-9774-daf857c91130-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:56:04 crc kubenswrapper[4784]: I1205 12:56:04.109344 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-99jlc\" (UniqueName: \"kubernetes.io/projected/14480f5b-23df-48ea-9774-daf857c91130-kube-api-access-99jlc\") on node \"crc\" DevicePath \"\"" Dec 05 12:56:04 crc kubenswrapper[4784]: I1205 12:56:04.129569 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/14480f5b-23df-48ea-9774-daf857c91130-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "14480f5b-23df-48ea-9774-daf857c91130" (UID: "14480f5b-23df-48ea-9774-daf857c91130"). InnerVolumeSpecName "catalog-content". 
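
The "Observed pod startup duration" entries above carry two figures: podStartE2EDuration is the watch-observed running time minus podCreationTimestamp, and podStartSLOduration further subtracts the image-pull window (firstStartedPulling to lastFinishedPulling). Recomputing the redhat-operators-6qwmp entry from its own wall-clock timestamps (values copied from the log; the tracker itself works from the monotonic m=+ readings, which is why the last digits differ slightly):

    package main

    import (
        "fmt"
        "time"
    )

    func mustParse(s string) time.Time {
        t, err := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST", s)
        if err != nil {
            panic(err)
        }
        return t
    }

    func main() {
        created := mustParse("2025-12-05 12:55:41 +0000 UTC")
        firstPull := mustParse("2025-12-05 12:55:43.130887821 +0000 UTC")
        lastPull := mustParse("2025-12-05 12:55:50.262920877 +0000 UTC")
        running := mustParse("2025-12-05 12:55:51.245796466 +0000 UTC")

        e2e := running.Sub(created)
        slo := e2e - lastPull.Sub(firstPull)
        fmt.Println("podStartE2EDuration:", e2e) // 10.245796466s, as logged
        fmt.Println("podStartSLOduration:", slo) // 3.11376341s; log shows 3.11376345s
        // from the monotonic m=+ offsets
    }
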
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:56:04 crc kubenswrapper[4784]: I1205 12:56:04.210374 4784 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14480f5b-23df-48ea-9774-daf857c91130-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:56:04 crc kubenswrapper[4784]: I1205 12:56:04.358070 4784 generic.go:334] "Generic (PLEG): container finished" podID="14480f5b-23df-48ea-9774-daf857c91130" containerID="73ce6154f7d7c2337647ae5145b45b8b68501fd68d3db97752366b0aae698168" exitCode=0 Dec 05 12:56:04 crc kubenswrapper[4784]: I1205 12:56:04.358134 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6qwmp" Dec 05 12:56:04 crc kubenswrapper[4784]: I1205 12:56:04.358152 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6qwmp" event={"ID":"14480f5b-23df-48ea-9774-daf857c91130","Type":"ContainerDied","Data":"73ce6154f7d7c2337647ae5145b45b8b68501fd68d3db97752366b0aae698168"} Dec 05 12:56:04 crc kubenswrapper[4784]: I1205 12:56:04.358575 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6qwmp" event={"ID":"14480f5b-23df-48ea-9774-daf857c91130","Type":"ContainerDied","Data":"f5700785f99eb8c106f56c2fd86d9ea55f2e0973e02b6ed3d6f2450d83e05d36"} Dec 05 12:56:04 crc kubenswrapper[4784]: I1205 12:56:04.358613 4784 scope.go:117] "RemoveContainer" containerID="73ce6154f7d7c2337647ae5145b45b8b68501fd68d3db97752366b0aae698168" Dec 05 12:56:04 crc kubenswrapper[4784]: I1205 12:56:04.384067 4784 scope.go:117] "RemoveContainer" containerID="e9c03e678f37ea77b45a5d9b69ad3737682c998e1b7044dac6ce02eb363b1693" Dec 05 12:56:04 crc kubenswrapper[4784]: I1205 12:56:04.405103 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6qwmp"] Dec 05 12:56:04 crc kubenswrapper[4784]: I1205 12:56:04.417310 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-6qwmp"] Dec 05 12:56:04 crc kubenswrapper[4784]: I1205 12:56:04.429126 4784 scope.go:117] "RemoveContainer" containerID="307087878dc4bca33ed0cb028154561143fd10676a17a3d0d13da5015a7e82e7" Dec 05 12:56:04 crc kubenswrapper[4784]: I1205 12:56:04.482134 4784 scope.go:117] "RemoveContainer" containerID="73ce6154f7d7c2337647ae5145b45b8b68501fd68d3db97752366b0aae698168" Dec 05 12:56:04 crc kubenswrapper[4784]: E1205 12:56:04.482698 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"73ce6154f7d7c2337647ae5145b45b8b68501fd68d3db97752366b0aae698168\": container with ID starting with 73ce6154f7d7c2337647ae5145b45b8b68501fd68d3db97752366b0aae698168 not found: ID does not exist" containerID="73ce6154f7d7c2337647ae5145b45b8b68501fd68d3db97752366b0aae698168" Dec 05 12:56:04 crc kubenswrapper[4784]: I1205 12:56:04.482727 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"73ce6154f7d7c2337647ae5145b45b8b68501fd68d3db97752366b0aae698168"} err="failed to get container status \"73ce6154f7d7c2337647ae5145b45b8b68501fd68d3db97752366b0aae698168\": rpc error: code = NotFound desc = could not find container \"73ce6154f7d7c2337647ae5145b45b8b68501fd68d3db97752366b0aae698168\": container with ID starting with 73ce6154f7d7c2337647ae5145b45b8b68501fd68d3db97752366b0aae698168 not found: ID does not exist" Dec 05 12:56:04 crc 
kubenswrapper[4784]: I1205 12:56:04.482748 4784 scope.go:117] "RemoveContainer" containerID="e9c03e678f37ea77b45a5d9b69ad3737682c998e1b7044dac6ce02eb363b1693" Dec 05 12:56:04 crc kubenswrapper[4784]: E1205 12:56:04.483147 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e9c03e678f37ea77b45a5d9b69ad3737682c998e1b7044dac6ce02eb363b1693\": container with ID starting with e9c03e678f37ea77b45a5d9b69ad3737682c998e1b7044dac6ce02eb363b1693 not found: ID does not exist" containerID="e9c03e678f37ea77b45a5d9b69ad3737682c998e1b7044dac6ce02eb363b1693" Dec 05 12:56:04 crc kubenswrapper[4784]: I1205 12:56:04.483208 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9c03e678f37ea77b45a5d9b69ad3737682c998e1b7044dac6ce02eb363b1693"} err="failed to get container status \"e9c03e678f37ea77b45a5d9b69ad3737682c998e1b7044dac6ce02eb363b1693\": rpc error: code = NotFound desc = could not find container \"e9c03e678f37ea77b45a5d9b69ad3737682c998e1b7044dac6ce02eb363b1693\": container with ID starting with e9c03e678f37ea77b45a5d9b69ad3737682c998e1b7044dac6ce02eb363b1693 not found: ID does not exist" Dec 05 12:56:04 crc kubenswrapper[4784]: I1205 12:56:04.483238 4784 scope.go:117] "RemoveContainer" containerID="307087878dc4bca33ed0cb028154561143fd10676a17a3d0d13da5015a7e82e7" Dec 05 12:56:04 crc kubenswrapper[4784]: E1205 12:56:04.483628 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"307087878dc4bca33ed0cb028154561143fd10676a17a3d0d13da5015a7e82e7\": container with ID starting with 307087878dc4bca33ed0cb028154561143fd10676a17a3d0d13da5015a7e82e7 not found: ID does not exist" containerID="307087878dc4bca33ed0cb028154561143fd10676a17a3d0d13da5015a7e82e7" Dec 05 12:56:04 crc kubenswrapper[4784]: I1205 12:56:04.483660 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"307087878dc4bca33ed0cb028154561143fd10676a17a3d0d13da5015a7e82e7"} err="failed to get container status \"307087878dc4bca33ed0cb028154561143fd10676a17a3d0d13da5015a7e82e7\": rpc error: code = NotFound desc = could not find container \"307087878dc4bca33ed0cb028154561143fd10676a17a3d0d13da5015a7e82e7\": container with ID starting with 307087878dc4bca33ed0cb028154561143fd10676a17a3d0d13da5015a7e82e7 not found: ID does not exist" Dec 05 12:56:05 crc kubenswrapper[4784]: I1205 12:56:05.019427 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14480f5b-23df-48ea-9774-daf857c91130" path="/var/lib/kubelet/pods/14480f5b-23df-48ea-9774-daf857c91130/volumes" Dec 05 12:56:06 crc kubenswrapper[4784]: I1205 12:56:05.999753 4784 scope.go:117] "RemoveContainer" containerID="1e04a48f7470e663550729d5a6189f2705f6151f92d045a29c6589f7e1ee84a9" Dec 05 12:56:06 crc kubenswrapper[4784]: I1205 12:56:06.378731 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerStarted","Data":"530d1b87a942e0fe217bf312e1b52cd3f9bc07f40f420aae148e7f7c0b99a93a"} Dec 05 12:56:07 crc kubenswrapper[4784]: I1205 12:56:07.080867 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-xcs6s"] Dec 05 12:56:07 crc kubenswrapper[4784]: I1205 12:56:07.092890 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-xcs6s"] Dec 05 12:56:08 crc 
kubenswrapper[4784]: I1205 12:56:08.036698 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-vt6gx"] Dec 05 12:56:08 crc kubenswrapper[4784]: I1205 12:56:08.057533 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-vt6gx"] Dec 05 12:56:09 crc kubenswrapper[4784]: I1205 12:56:09.019444 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7216e2bb-f775-4b6b-9f34-b966f26f4002" path="/var/lib/kubelet/pods/7216e2bb-f775-4b6b-9f34-b966f26f4002/volumes" Dec 05 12:56:09 crc kubenswrapper[4784]: I1205 12:56:09.021356 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a94ed534-bf7b-4fdc-9c79-0fa4425cb785" path="/var/lib/kubelet/pods/a94ed534-bf7b-4fdc-9c79-0fa4425cb785/volumes" Dec 05 12:56:25 crc kubenswrapper[4784]: I1205 12:56:25.054639 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-zx82c"] Dec 05 12:56:25 crc kubenswrapper[4784]: I1205 12:56:25.063891 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-jptmz"] Dec 05 12:56:25 crc kubenswrapper[4784]: I1205 12:56:25.073559 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-zx82c"] Dec 05 12:56:25 crc kubenswrapper[4784]: I1205 12:56:25.084349 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-jptmz"] Dec 05 12:56:27 crc kubenswrapper[4784]: I1205 12:56:27.010805 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b05cb70-952a-4d24-a3e5-cbbff5d53021" path="/var/lib/kubelet/pods/4b05cb70-952a-4d24-a3e5-cbbff5d53021/volumes" Dec 05 12:56:27 crc kubenswrapper[4784]: I1205 12:56:27.011493 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="851aaea1-2d18-4f91-b410-5fdb0a7f42ec" path="/var/lib/kubelet/pods/851aaea1-2d18-4f91-b410-5fdb0a7f42ec/volumes" Dec 05 12:56:36 crc kubenswrapper[4784]: I1205 12:56:36.040760 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-bwtqt"] Dec 05 12:56:36 crc kubenswrapper[4784]: I1205 12:56:36.050652 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-bwtqt"] Dec 05 12:56:37 crc kubenswrapper[4784]: I1205 12:56:37.011543 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c921ceb1-e577-4b4a-be99-3544491930d3" path="/var/lib/kubelet/pods/c921ceb1-e577-4b4a-be99-3544491930d3/volumes" Dec 05 12:56:37 crc kubenswrapper[4784]: I1205 12:56:37.641058 4784 scope.go:117] "RemoveContainer" containerID="71e01c7a7c0b9a03eade199a6d9c092e6be85aa8153e328e22b810a9dbe93e98" Dec 05 12:56:37 crc kubenswrapper[4784]: I1205 12:56:37.665472 4784 scope.go:117] "RemoveContainer" containerID="ed101b41af71c06e8fc74105f382f320686edc591a1254573e361ce8b50553b4" Dec 05 12:56:37 crc kubenswrapper[4784]: I1205 12:56:37.717608 4784 scope.go:117] "RemoveContainer" containerID="ad747a9b988792246f6406b792de3f2be8a3af509d4378bec43549e9fc5f91ca" Dec 05 12:56:37 crc kubenswrapper[4784]: I1205 12:56:37.779841 4784 scope.go:117] "RemoveContainer" containerID="064e84eff9398f531da02547908007f542373a3dbc6a82449493cad6c1506a07" Dec 05 12:56:37 crc kubenswrapper[4784]: I1205 12:56:37.827532 4784 scope.go:117] "RemoveContainer" containerID="d429f8e576a87219e72979ad510b25bb20f2033eafae69265e04616168a0db2e" Dec 05 12:56:37 crc kubenswrapper[4784]: I1205 12:56:37.881411 4784 scope.go:117] "RemoveContainer" 
containerID="cef934a659ba928393f7fd2f3159621d3c13b65bcacd8f43dbaa52033ec0dda7" Dec 05 12:56:37 crc kubenswrapper[4784]: I1205 12:56:37.932956 4784 scope.go:117] "RemoveContainer" containerID="1c164b5e8d03ab5c187ce9704d36cd74b5e9701d1919c7d4f77c0c11c3625fb3" Dec 05 12:56:38 crc kubenswrapper[4784]: I1205 12:56:38.043815 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-24k4j"] Dec 05 12:56:38 crc kubenswrapper[4784]: I1205 12:56:38.060781 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-24k4j"] Dec 05 12:56:39 crc kubenswrapper[4784]: I1205 12:56:39.014794 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e187592b-b331-4144-9a27-ba81e79121b6" path="/var/lib/kubelet/pods/e187592b-b331-4144-9a27-ba81e79121b6/volumes" Dec 05 12:57:10 crc kubenswrapper[4784]: I1205 12:57:10.044440 4784 generic.go:334] "Generic (PLEG): container finished" podID="a134d50c-87cd-4225-b873-1c6b1d2a0151" containerID="812d8ee055c89845af30c513e5d6c8416eda18fce96fa2f55fddae0b9e1ed641" exitCode=0 Dec 05 12:57:10 crc kubenswrapper[4784]: I1205 12:57:10.044601 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tx7xw" event={"ID":"a134d50c-87cd-4225-b873-1c6b1d2a0151","Type":"ContainerDied","Data":"812d8ee055c89845af30c513e5d6c8416eda18fce96fa2f55fddae0b9e1ed641"} Dec 05 12:57:11 crc kubenswrapper[4784]: I1205 12:57:11.472958 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tx7xw" Dec 05 12:57:11 crc kubenswrapper[4784]: I1205 12:57:11.566149 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-499rb\" (UniqueName: \"kubernetes.io/projected/a134d50c-87cd-4225-b873-1c6b1d2a0151-kube-api-access-499rb\") pod \"a134d50c-87cd-4225-b873-1c6b1d2a0151\" (UID: \"a134d50c-87cd-4225-b873-1c6b1d2a0151\") " Dec 05 12:57:11 crc kubenswrapper[4784]: I1205 12:57:11.566242 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a134d50c-87cd-4225-b873-1c6b1d2a0151-inventory\") pod \"a134d50c-87cd-4225-b873-1c6b1d2a0151\" (UID: \"a134d50c-87cd-4225-b873-1c6b1d2a0151\") " Dec 05 12:57:11 crc kubenswrapper[4784]: I1205 12:57:11.566471 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a134d50c-87cd-4225-b873-1c6b1d2a0151-ssh-key\") pod \"a134d50c-87cd-4225-b873-1c6b1d2a0151\" (UID: \"a134d50c-87cd-4225-b873-1c6b1d2a0151\") " Dec 05 12:57:11 crc kubenswrapper[4784]: I1205 12:57:11.572503 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a134d50c-87cd-4225-b873-1c6b1d2a0151-kube-api-access-499rb" (OuterVolumeSpecName: "kube-api-access-499rb") pod "a134d50c-87cd-4225-b873-1c6b1d2a0151" (UID: "a134d50c-87cd-4225-b873-1c6b1d2a0151"). InnerVolumeSpecName "kube-api-access-499rb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:57:11 crc kubenswrapper[4784]: I1205 12:57:11.594723 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a134d50c-87cd-4225-b873-1c6b1d2a0151-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a134d50c-87cd-4225-b873-1c6b1d2a0151" (UID: "a134d50c-87cd-4225-b873-1c6b1d2a0151"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:57:11 crc kubenswrapper[4784]: I1205 12:57:11.595070 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a134d50c-87cd-4225-b873-1c6b1d2a0151-inventory" (OuterVolumeSpecName: "inventory") pod "a134d50c-87cd-4225-b873-1c6b1d2a0151" (UID: "a134d50c-87cd-4225-b873-1c6b1d2a0151"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:57:11 crc kubenswrapper[4784]: I1205 12:57:11.670348 4784 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a134d50c-87cd-4225-b873-1c6b1d2a0151-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 12:57:11 crc kubenswrapper[4784]: I1205 12:57:11.670375 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-499rb\" (UniqueName: \"kubernetes.io/projected/a134d50c-87cd-4225-b873-1c6b1d2a0151-kube-api-access-499rb\") on node \"crc\" DevicePath \"\"" Dec 05 12:57:11 crc kubenswrapper[4784]: I1205 12:57:11.670386 4784 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a134d50c-87cd-4225-b873-1c6b1d2a0151-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 12:57:12 crc kubenswrapper[4784]: I1205 12:57:12.071004 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tx7xw" event={"ID":"a134d50c-87cd-4225-b873-1c6b1d2a0151","Type":"ContainerDied","Data":"966d370474c4686ced4147a1e8b1fbae9a6ccd48c232f4edff4a515495eff6a0"} Dec 05 12:57:12 crc kubenswrapper[4784]: I1205 12:57:12.071051 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="966d370474c4686ced4147a1e8b1fbae9a6ccd48c232f4edff4a515495eff6a0" Dec 05 12:57:12 crc kubenswrapper[4784]: I1205 12:57:12.071051 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tx7xw" Dec 05 12:57:12 crc kubenswrapper[4784]: I1205 12:57:12.169220 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mh867"] Dec 05 12:57:12 crc kubenswrapper[4784]: E1205 12:57:12.170031 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a134d50c-87cd-4225-b873-1c6b1d2a0151" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 05 12:57:12 crc kubenswrapper[4784]: I1205 12:57:12.170045 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="a134d50c-87cd-4225-b873-1c6b1d2a0151" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 05 12:57:12 crc kubenswrapper[4784]: E1205 12:57:12.170076 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14480f5b-23df-48ea-9774-daf857c91130" containerName="registry-server" Dec 05 12:57:12 crc kubenswrapper[4784]: I1205 12:57:12.170082 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="14480f5b-23df-48ea-9774-daf857c91130" containerName="registry-server" Dec 05 12:57:12 crc kubenswrapper[4784]: E1205 12:57:12.170104 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14480f5b-23df-48ea-9774-daf857c91130" containerName="extract-content" Dec 05 12:57:12 crc kubenswrapper[4784]: I1205 12:57:12.170111 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="14480f5b-23df-48ea-9774-daf857c91130" containerName="extract-content" Dec 05 12:57:12 crc kubenswrapper[4784]: E1205 12:57:12.170129 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14480f5b-23df-48ea-9774-daf857c91130" containerName="extract-utilities" Dec 05 12:57:12 crc kubenswrapper[4784]: I1205 12:57:12.170135 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="14480f5b-23df-48ea-9774-daf857c91130" containerName="extract-utilities" Dec 05 12:57:12 crc kubenswrapper[4784]: I1205 12:57:12.170376 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="14480f5b-23df-48ea-9774-daf857c91130" containerName="registry-server" Dec 05 12:57:12 crc kubenswrapper[4784]: I1205 12:57:12.170403 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="a134d50c-87cd-4225-b873-1c6b1d2a0151" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 05 12:57:12 crc kubenswrapper[4784]: I1205 12:57:12.171158 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mh867" Dec 05 12:57:12 crc kubenswrapper[4784]: I1205 12:57:12.175317 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 12:57:12 crc kubenswrapper[4784]: I1205 12:57:12.175490 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-lwfzg" Dec 05 12:57:12 crc kubenswrapper[4784]: I1205 12:57:12.175657 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 12:57:12 crc kubenswrapper[4784]: I1205 12:57:12.175757 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 12:57:12 crc kubenswrapper[4784]: I1205 12:57:12.181164 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mh867"] Dec 05 12:57:12 crc kubenswrapper[4784]: I1205 12:57:12.281887 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-snhrl\" (UniqueName: \"kubernetes.io/projected/face2e9b-424c-4b68-8b2d-8f00b1e79256-kube-api-access-snhrl\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-mh867\" (UID: \"face2e9b-424c-4b68-8b2d-8f00b1e79256\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mh867" Dec 05 12:57:12 crc kubenswrapper[4784]: I1205 12:57:12.282045 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/face2e9b-424c-4b68-8b2d-8f00b1e79256-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-mh867\" (UID: \"face2e9b-424c-4b68-8b2d-8f00b1e79256\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mh867" Dec 05 12:57:12 crc kubenswrapper[4784]: I1205 12:57:12.282113 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/face2e9b-424c-4b68-8b2d-8f00b1e79256-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-mh867\" (UID: \"face2e9b-424c-4b68-8b2d-8f00b1e79256\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mh867" Dec 05 12:57:12 crc kubenswrapper[4784]: I1205 12:57:12.383672 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-snhrl\" (UniqueName: \"kubernetes.io/projected/face2e9b-424c-4b68-8b2d-8f00b1e79256-kube-api-access-snhrl\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-mh867\" (UID: \"face2e9b-424c-4b68-8b2d-8f00b1e79256\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mh867" Dec 05 12:57:12 crc kubenswrapper[4784]: I1205 12:57:12.383768 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/face2e9b-424c-4b68-8b2d-8f00b1e79256-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-mh867\" (UID: \"face2e9b-424c-4b68-8b2d-8f00b1e79256\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mh867" Dec 05 12:57:12 crc kubenswrapper[4784]: I1205 12:57:12.383831 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/face2e9b-424c-4b68-8b2d-8f00b1e79256-ssh-key\") 
pod \"configure-network-edpm-deployment-openstack-edpm-ipam-mh867\" (UID: \"face2e9b-424c-4b68-8b2d-8f00b1e79256\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mh867" Dec 05 12:57:12 crc kubenswrapper[4784]: I1205 12:57:12.388865 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/face2e9b-424c-4b68-8b2d-8f00b1e79256-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-mh867\" (UID: \"face2e9b-424c-4b68-8b2d-8f00b1e79256\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mh867" Dec 05 12:57:12 crc kubenswrapper[4784]: I1205 12:57:12.388878 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/face2e9b-424c-4b68-8b2d-8f00b1e79256-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-mh867\" (UID: \"face2e9b-424c-4b68-8b2d-8f00b1e79256\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mh867" Dec 05 12:57:12 crc kubenswrapper[4784]: I1205 12:57:12.400445 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-snhrl\" (UniqueName: \"kubernetes.io/projected/face2e9b-424c-4b68-8b2d-8f00b1e79256-kube-api-access-snhrl\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-mh867\" (UID: \"face2e9b-424c-4b68-8b2d-8f00b1e79256\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mh867" Dec 05 12:57:12 crc kubenswrapper[4784]: I1205 12:57:12.531765 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mh867" Dec 05 12:57:13 crc kubenswrapper[4784]: I1205 12:57:13.083699 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mh867"] Dec 05 12:57:14 crc kubenswrapper[4784]: I1205 12:57:14.090843 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mh867" event={"ID":"face2e9b-424c-4b68-8b2d-8f00b1e79256","Type":"ContainerStarted","Data":"ad1c5775de736b5ab1a606b583ec74fbe2df25c241ddf2926223cdad912926f6"} Dec 05 12:57:14 crc kubenswrapper[4784]: I1205 12:57:14.091164 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mh867" event={"ID":"face2e9b-424c-4b68-8b2d-8f00b1e79256","Type":"ContainerStarted","Data":"fefdd6a7bd2281afc679c46dcf1bdf5f8dd0ce39d2cfe50124c95b691593a3ae"} Dec 05 12:57:14 crc kubenswrapper[4784]: I1205 12:57:14.117163 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mh867" podStartSLOduration=1.6247256129999998 podStartE2EDuration="2.11714629s" podCreationTimestamp="2025-12-05 12:57:12 +0000 UTC" firstStartedPulling="2025-12-05 12:57:13.089576768 +0000 UTC m=+1912.509643583" lastFinishedPulling="2025-12-05 12:57:13.581997445 +0000 UTC m=+1913.002064260" observedRunningTime="2025-12-05 12:57:14.10885713 +0000 UTC m=+1913.528924005" watchObservedRunningTime="2025-12-05 12:57:14.11714629 +0000 UTC m=+1913.537213095" Dec 05 12:57:16 crc kubenswrapper[4784]: I1205 12:57:16.044865 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-3537-account-create-update-gb6qr"] Dec 05 12:57:16 crc kubenswrapper[4784]: I1205 12:57:16.060782 4784 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openstack/nova-api-8da5-account-create-update-hblwl"] Dec 05 12:57:16 crc kubenswrapper[4784]: I1205 12:57:16.073266 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-7701-account-create-update-mcnx7"] Dec 05 12:57:16 crc kubenswrapper[4784]: I1205 12:57:16.084116 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-7rms5"] Dec 05 12:57:16 crc kubenswrapper[4784]: I1205 12:57:16.093947 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-3537-account-create-update-gb6qr"] Dec 05 12:57:16 crc kubenswrapper[4784]: I1205 12:57:16.103535 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-bjf5q"] Dec 05 12:57:16 crc kubenswrapper[4784]: I1205 12:57:16.111329 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-8da5-account-create-update-hblwl"] Dec 05 12:57:16 crc kubenswrapper[4784]: I1205 12:57:16.137328 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-7701-account-create-update-mcnx7"] Dec 05 12:57:16 crc kubenswrapper[4784]: I1205 12:57:16.146715 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-7rms5"] Dec 05 12:57:16 crc kubenswrapper[4784]: I1205 12:57:16.156117 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-bjf5q"] Dec 05 12:57:17 crc kubenswrapper[4784]: I1205 12:57:17.013173 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="124aed59-cf1a-4150-adf5-a055107f2834" path="/var/lib/kubelet/pods/124aed59-cf1a-4150-adf5-a055107f2834/volumes" Dec 05 12:57:17 crc kubenswrapper[4784]: I1205 12:57:17.014410 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6a5de896-0d89-42c3-a59b-d07f226e76dc" path="/var/lib/kubelet/pods/6a5de896-0d89-42c3-a59b-d07f226e76dc/volumes" Dec 05 12:57:17 crc kubenswrapper[4784]: I1205 12:57:17.015125 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96d6e94d-da77-4211-8009-cec8d1ae70b4" path="/var/lib/kubelet/pods/96d6e94d-da77-4211-8009-cec8d1ae70b4/volumes" Dec 05 12:57:17 crc kubenswrapper[4784]: I1205 12:57:17.018956 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bb722f31-d720-4de5-9a0e-cdbb3af2a535" path="/var/lib/kubelet/pods/bb722f31-d720-4de5-9a0e-cdbb3af2a535/volumes" Dec 05 12:57:17 crc kubenswrapper[4784]: I1205 12:57:17.019768 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d81b0108-f196-4724-90bb-c60348271f96" path="/var/lib/kubelet/pods/d81b0108-f196-4724-90bb-c60348271f96/volumes" Dec 05 12:57:17 crc kubenswrapper[4784]: I1205 12:57:17.036035 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-n6b2l"] Dec 05 12:57:17 crc kubenswrapper[4784]: I1205 12:57:17.049153 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-n6b2l"] Dec 05 12:57:19 crc kubenswrapper[4784]: I1205 12:57:19.009696 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64467fbc-3720-453a-af74-aee0374aaa3a" path="/var/lib/kubelet/pods/64467fbc-3720-453a-af74-aee0374aaa3a/volumes" Dec 05 12:57:38 crc kubenswrapper[4784]: I1205 12:57:38.140427 4784 scope.go:117] "RemoveContainer" containerID="aa78a190a91f98432378dd813c10b6fabd8064d364217c443227c576cce83f68" Dec 05 12:57:38 crc kubenswrapper[4784]: I1205 12:57:38.181596 4784 scope.go:117] "RemoveContainer" 
containerID="69b9d3a278579160fd9883a122e79c24ea4a2bd12016697befcfbf4c2c7efeda" Dec 05 12:57:38 crc kubenswrapper[4784]: I1205 12:57:38.237475 4784 scope.go:117] "RemoveContainer" containerID="fe7d87073b246c6fa9e935b02e2358264abb29fe814925789307c5ba3835fd3f" Dec 05 12:57:38 crc kubenswrapper[4784]: I1205 12:57:38.289268 4784 scope.go:117] "RemoveContainer" containerID="96a2703a1293e4e404b140d7cf2771dc71d0134eac8f0652208c1b8a2129a643" Dec 05 12:57:38 crc kubenswrapper[4784]: I1205 12:57:38.334594 4784 scope.go:117] "RemoveContainer" containerID="153b9c8c09bb1983c0bd0729b56ac8c11d53017d9e5d21afcf783bc4e7b3c359" Dec 05 12:57:38 crc kubenswrapper[4784]: I1205 12:57:38.397907 4784 scope.go:117] "RemoveContainer" containerID="1213d3b0b9294f443d495b5c6d9c5553af7b9da7088f306c89099523e7cb4934" Dec 05 12:57:38 crc kubenswrapper[4784]: I1205 12:57:38.435792 4784 scope.go:117] "RemoveContainer" containerID="9ca5c5ea89f132cef3d5fd189b8aa7413fcde4f29888afc5734f2988548f38f5" Dec 05 12:57:51 crc kubenswrapper[4784]: I1205 12:57:51.046657 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-glpkn"] Dec 05 12:57:51 crc kubenswrapper[4784]: I1205 12:57:51.057557 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-glpkn"] Dec 05 12:57:53 crc kubenswrapper[4784]: I1205 12:57:53.009102 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="26e5617b-90b9-451b-a0a1-6f43d885ab38" path="/var/lib/kubelet/pods/26e5617b-90b9-451b-a0a1-6f43d885ab38/volumes" Dec 05 12:58:24 crc kubenswrapper[4784]: I1205 12:58:24.042775 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-9qvf2"] Dec 05 12:58:24 crc kubenswrapper[4784]: I1205 12:58:24.055775 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-9qvf2"] Dec 05 12:58:25 crc kubenswrapper[4784]: I1205 12:58:25.016053 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31e25db3-7d9e-43b2-8e5a-b6956be5114e" path="/var/lib/kubelet/pods/31e25db3-7d9e-43b2-8e5a-b6956be5114e/volumes" Dec 05 12:58:27 crc kubenswrapper[4784]: I1205 12:58:27.036618 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-69577"] Dec 05 12:58:27 crc kubenswrapper[4784]: I1205 12:58:27.053960 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-69577"] Dec 05 12:58:29 crc kubenswrapper[4784]: I1205 12:58:29.018144 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7b90226-6cca-424b-9a49-d2f1bf8c289f" path="/var/lib/kubelet/pods/e7b90226-6cca-424b-9a49-d2f1bf8c289f/volumes" Dec 05 12:58:29 crc kubenswrapper[4784]: I1205 12:58:29.572555 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:58:29 crc kubenswrapper[4784]: I1205 12:58:29.572612 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:58:32 crc kubenswrapper[4784]: I1205 12:58:32.973182 4784 
generic.go:334] "Generic (PLEG): container finished" podID="face2e9b-424c-4b68-8b2d-8f00b1e79256" containerID="ad1c5775de736b5ab1a606b583ec74fbe2df25c241ddf2926223cdad912926f6" exitCode=0 Dec 05 12:58:32 crc kubenswrapper[4784]: I1205 12:58:32.973307 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mh867" event={"ID":"face2e9b-424c-4b68-8b2d-8f00b1e79256","Type":"ContainerDied","Data":"ad1c5775de736b5ab1a606b583ec74fbe2df25c241ddf2926223cdad912926f6"} Dec 05 12:58:34 crc kubenswrapper[4784]: I1205 12:58:34.419435 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mh867" Dec 05 12:58:34 crc kubenswrapper[4784]: I1205 12:58:34.535852 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/face2e9b-424c-4b68-8b2d-8f00b1e79256-ssh-key\") pod \"face2e9b-424c-4b68-8b2d-8f00b1e79256\" (UID: \"face2e9b-424c-4b68-8b2d-8f00b1e79256\") " Dec 05 12:58:34 crc kubenswrapper[4784]: I1205 12:58:34.535982 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-snhrl\" (UniqueName: \"kubernetes.io/projected/face2e9b-424c-4b68-8b2d-8f00b1e79256-kube-api-access-snhrl\") pod \"face2e9b-424c-4b68-8b2d-8f00b1e79256\" (UID: \"face2e9b-424c-4b68-8b2d-8f00b1e79256\") " Dec 05 12:58:34 crc kubenswrapper[4784]: I1205 12:58:34.536590 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/face2e9b-424c-4b68-8b2d-8f00b1e79256-inventory\") pod \"face2e9b-424c-4b68-8b2d-8f00b1e79256\" (UID: \"face2e9b-424c-4b68-8b2d-8f00b1e79256\") " Dec 05 12:58:34 crc kubenswrapper[4784]: I1205 12:58:34.545291 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/face2e9b-424c-4b68-8b2d-8f00b1e79256-kube-api-access-snhrl" (OuterVolumeSpecName: "kube-api-access-snhrl") pod "face2e9b-424c-4b68-8b2d-8f00b1e79256" (UID: "face2e9b-424c-4b68-8b2d-8f00b1e79256"). InnerVolumeSpecName "kube-api-access-snhrl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:58:34 crc kubenswrapper[4784]: I1205 12:58:34.555763 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-snhrl\" (UniqueName: \"kubernetes.io/projected/face2e9b-424c-4b68-8b2d-8f00b1e79256-kube-api-access-snhrl\") on node \"crc\" DevicePath \"\"" Dec 05 12:58:34 crc kubenswrapper[4784]: I1205 12:58:34.568654 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/face2e9b-424c-4b68-8b2d-8f00b1e79256-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "face2e9b-424c-4b68-8b2d-8f00b1e79256" (UID: "face2e9b-424c-4b68-8b2d-8f00b1e79256"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:58:34 crc kubenswrapper[4784]: I1205 12:58:34.588601 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/face2e9b-424c-4b68-8b2d-8f00b1e79256-inventory" (OuterVolumeSpecName: "inventory") pod "face2e9b-424c-4b68-8b2d-8f00b1e79256" (UID: "face2e9b-424c-4b68-8b2d-8f00b1e79256"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:58:34 crc kubenswrapper[4784]: I1205 12:58:34.657129 4784 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/face2e9b-424c-4b68-8b2d-8f00b1e79256-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 12:58:34 crc kubenswrapper[4784]: I1205 12:58:34.657161 4784 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/face2e9b-424c-4b68-8b2d-8f00b1e79256-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 12:58:34 crc kubenswrapper[4784]: I1205 12:58:34.997538 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mh867" event={"ID":"face2e9b-424c-4b68-8b2d-8f00b1e79256","Type":"ContainerDied","Data":"fefdd6a7bd2281afc679c46dcf1bdf5f8dd0ce39d2cfe50124c95b691593a3ae"} Dec 05 12:58:34 crc kubenswrapper[4784]: I1205 12:58:34.997766 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fefdd6a7bd2281afc679c46dcf1bdf5f8dd0ce39d2cfe50124c95b691593a3ae" Dec 05 12:58:34 crc kubenswrapper[4784]: I1205 12:58:34.997592 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mh867" Dec 05 12:58:35 crc kubenswrapper[4784]: I1205 12:58:35.093548 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-xdxzl"] Dec 05 12:58:35 crc kubenswrapper[4784]: E1205 12:58:35.093971 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="face2e9b-424c-4b68-8b2d-8f00b1e79256" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 05 12:58:35 crc kubenswrapper[4784]: I1205 12:58:35.093993 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="face2e9b-424c-4b68-8b2d-8f00b1e79256" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 05 12:58:35 crc kubenswrapper[4784]: I1205 12:58:35.094293 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="face2e9b-424c-4b68-8b2d-8f00b1e79256" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 05 12:58:35 crc kubenswrapper[4784]: I1205 12:58:35.094931 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-xdxzl" Dec 05 12:58:35 crc kubenswrapper[4784]: I1205 12:58:35.097318 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 12:58:35 crc kubenswrapper[4784]: I1205 12:58:35.097551 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 12:58:35 crc kubenswrapper[4784]: I1205 12:58:35.097647 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 12:58:35 crc kubenswrapper[4784]: I1205 12:58:35.097830 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-lwfzg" Dec 05 12:58:35 crc kubenswrapper[4784]: I1205 12:58:35.114436 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-xdxzl"] Dec 05 12:58:35 crc kubenswrapper[4784]: I1205 12:58:35.271030 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c4647929-264c-4fe3-b2ee-f543c25a50d0-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-xdxzl\" (UID: \"c4647929-264c-4fe3-b2ee-f543c25a50d0\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-xdxzl" Dec 05 12:58:35 crc kubenswrapper[4784]: I1205 12:58:35.271106 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zkhxw\" (UniqueName: \"kubernetes.io/projected/c4647929-264c-4fe3-b2ee-f543c25a50d0-kube-api-access-zkhxw\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-xdxzl\" (UID: \"c4647929-264c-4fe3-b2ee-f543c25a50d0\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-xdxzl" Dec 05 12:58:35 crc kubenswrapper[4784]: I1205 12:58:35.271209 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c4647929-264c-4fe3-b2ee-f543c25a50d0-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-xdxzl\" (UID: \"c4647929-264c-4fe3-b2ee-f543c25a50d0\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-xdxzl" Dec 05 12:58:35 crc kubenswrapper[4784]: I1205 12:58:35.373453 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zkhxw\" (UniqueName: \"kubernetes.io/projected/c4647929-264c-4fe3-b2ee-f543c25a50d0-kube-api-access-zkhxw\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-xdxzl\" (UID: \"c4647929-264c-4fe3-b2ee-f543c25a50d0\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-xdxzl" Dec 05 12:58:35 crc kubenswrapper[4784]: I1205 12:58:35.373944 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c4647929-264c-4fe3-b2ee-f543c25a50d0-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-xdxzl\" (UID: \"c4647929-264c-4fe3-b2ee-f543c25a50d0\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-xdxzl" Dec 05 12:58:35 crc kubenswrapper[4784]: I1205 12:58:35.374279 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c4647929-264c-4fe3-b2ee-f543c25a50d0-inventory\") pod 
\"validate-network-edpm-deployment-openstack-edpm-ipam-xdxzl\" (UID: \"c4647929-264c-4fe3-b2ee-f543c25a50d0\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-xdxzl" Dec 05 12:58:35 crc kubenswrapper[4784]: I1205 12:58:35.381753 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c4647929-264c-4fe3-b2ee-f543c25a50d0-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-xdxzl\" (UID: \"c4647929-264c-4fe3-b2ee-f543c25a50d0\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-xdxzl" Dec 05 12:58:35 crc kubenswrapper[4784]: I1205 12:58:35.382334 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c4647929-264c-4fe3-b2ee-f543c25a50d0-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-xdxzl\" (UID: \"c4647929-264c-4fe3-b2ee-f543c25a50d0\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-xdxzl" Dec 05 12:58:35 crc kubenswrapper[4784]: I1205 12:58:35.405053 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zkhxw\" (UniqueName: \"kubernetes.io/projected/c4647929-264c-4fe3-b2ee-f543c25a50d0-kube-api-access-zkhxw\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-xdxzl\" (UID: \"c4647929-264c-4fe3-b2ee-f543c25a50d0\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-xdxzl" Dec 05 12:58:35 crc kubenswrapper[4784]: I1205 12:58:35.417011 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-xdxzl" Dec 05 12:58:36 crc kubenswrapper[4784]: I1205 12:58:36.011133 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-xdxzl"] Dec 05 12:58:37 crc kubenswrapper[4784]: I1205 12:58:37.037977 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-xdxzl" event={"ID":"c4647929-264c-4fe3-b2ee-f543c25a50d0","Type":"ContainerStarted","Data":"41b1313326a46d64eb4bc756e1ebc77a1f801ef51b6a498c99f84b9de3c2f435"} Dec 05 12:58:38 crc kubenswrapper[4784]: I1205 12:58:38.051656 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-xdxzl" event={"ID":"c4647929-264c-4fe3-b2ee-f543c25a50d0","Type":"ContainerStarted","Data":"948dc011d58c363a2e4f0cd8742b234830d21bd7442d6b629d4b166af47b8df0"} Dec 05 12:58:38 crc kubenswrapper[4784]: I1205 12:58:38.065899 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-xdxzl" podStartSLOduration=2.306866551 podStartE2EDuration="3.06588289s" podCreationTimestamp="2025-12-05 12:58:35 +0000 UTC" firstStartedPulling="2025-12-05 12:58:36.013744671 +0000 UTC m=+1995.433811516" lastFinishedPulling="2025-12-05 12:58:36.77276103 +0000 UTC m=+1996.192827855" observedRunningTime="2025-12-05 12:58:38.06426364 +0000 UTC m=+1997.484330475" watchObservedRunningTime="2025-12-05 12:58:38.06588289 +0000 UTC m=+1997.485949705" Dec 05 12:58:38 crc kubenswrapper[4784]: I1205 12:58:38.586724 4784 scope.go:117] "RemoveContainer" containerID="602aae4c1761197ed50d137a50507c7c6bfbc79715901569be87bb7d536fdc47" Dec 05 12:58:38 crc kubenswrapper[4784]: I1205 12:58:38.641765 4784 scope.go:117] "RemoveContainer" 
containerID="0f35c6424f83e93161e92f8c310874f70e2e61e59304c6f30ed83bb875f0de4b" Dec 05 12:58:38 crc kubenswrapper[4784]: I1205 12:58:38.676931 4784 scope.go:117] "RemoveContainer" containerID="36e33246f62869ea234612af8ba86e05fad0a2809ea7858bd8d9d4f14dc6facf" Dec 05 12:58:42 crc kubenswrapper[4784]: I1205 12:58:42.098580 4784 generic.go:334] "Generic (PLEG): container finished" podID="c4647929-264c-4fe3-b2ee-f543c25a50d0" containerID="948dc011d58c363a2e4f0cd8742b234830d21bd7442d6b629d4b166af47b8df0" exitCode=0 Dec 05 12:58:42 crc kubenswrapper[4784]: I1205 12:58:42.098641 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-xdxzl" event={"ID":"c4647929-264c-4fe3-b2ee-f543c25a50d0","Type":"ContainerDied","Data":"948dc011d58c363a2e4f0cd8742b234830d21bd7442d6b629d4b166af47b8df0"} Dec 05 12:58:43 crc kubenswrapper[4784]: I1205 12:58:43.564597 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-xdxzl" Dec 05 12:58:43 crc kubenswrapper[4784]: I1205 12:58:43.753883 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c4647929-264c-4fe3-b2ee-f543c25a50d0-inventory\") pod \"c4647929-264c-4fe3-b2ee-f543c25a50d0\" (UID: \"c4647929-264c-4fe3-b2ee-f543c25a50d0\") " Dec 05 12:58:43 crc kubenswrapper[4784]: I1205 12:58:43.754222 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c4647929-264c-4fe3-b2ee-f543c25a50d0-ssh-key\") pod \"c4647929-264c-4fe3-b2ee-f543c25a50d0\" (UID: \"c4647929-264c-4fe3-b2ee-f543c25a50d0\") " Dec 05 12:58:43 crc kubenswrapper[4784]: I1205 12:58:43.754341 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkhxw\" (UniqueName: \"kubernetes.io/projected/c4647929-264c-4fe3-b2ee-f543c25a50d0-kube-api-access-zkhxw\") pod \"c4647929-264c-4fe3-b2ee-f543c25a50d0\" (UID: \"c4647929-264c-4fe3-b2ee-f543c25a50d0\") " Dec 05 12:58:43 crc kubenswrapper[4784]: I1205 12:58:43.760931 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4647929-264c-4fe3-b2ee-f543c25a50d0-kube-api-access-zkhxw" (OuterVolumeSpecName: "kube-api-access-zkhxw") pod "c4647929-264c-4fe3-b2ee-f543c25a50d0" (UID: "c4647929-264c-4fe3-b2ee-f543c25a50d0"). InnerVolumeSpecName "kube-api-access-zkhxw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:58:43 crc kubenswrapper[4784]: I1205 12:58:43.790667 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4647929-264c-4fe3-b2ee-f543c25a50d0-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c4647929-264c-4fe3-b2ee-f543c25a50d0" (UID: "c4647929-264c-4fe3-b2ee-f543c25a50d0"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:58:43 crc kubenswrapper[4784]: I1205 12:58:43.791640 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4647929-264c-4fe3-b2ee-f543c25a50d0-inventory" (OuterVolumeSpecName: "inventory") pod "c4647929-264c-4fe3-b2ee-f543c25a50d0" (UID: "c4647929-264c-4fe3-b2ee-f543c25a50d0"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:58:43 crc kubenswrapper[4784]: I1205 12:58:43.857618 4784 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c4647929-264c-4fe3-b2ee-f543c25a50d0-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 12:58:43 crc kubenswrapper[4784]: I1205 12:58:43.857674 4784 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c4647929-264c-4fe3-b2ee-f543c25a50d0-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 12:58:43 crc kubenswrapper[4784]: I1205 12:58:43.857692 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkhxw\" (UniqueName: \"kubernetes.io/projected/c4647929-264c-4fe3-b2ee-f543c25a50d0-kube-api-access-zkhxw\") on node \"crc\" DevicePath \"\"" Dec 05 12:58:44 crc kubenswrapper[4784]: I1205 12:58:44.117405 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-xdxzl" event={"ID":"c4647929-264c-4fe3-b2ee-f543c25a50d0","Type":"ContainerDied","Data":"41b1313326a46d64eb4bc756e1ebc77a1f801ef51b6a498c99f84b9de3c2f435"} Dec 05 12:58:44 crc kubenswrapper[4784]: I1205 12:58:44.117722 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="41b1313326a46d64eb4bc756e1ebc77a1f801ef51b6a498c99f84b9de3c2f435" Dec 05 12:58:44 crc kubenswrapper[4784]: I1205 12:58:44.117474 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-xdxzl" Dec 05 12:58:44 crc kubenswrapper[4784]: I1205 12:58:44.204675 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-kphz8"] Dec 05 12:58:44 crc kubenswrapper[4784]: E1205 12:58:44.205072 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4647929-264c-4fe3-b2ee-f543c25a50d0" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 05 12:58:44 crc kubenswrapper[4784]: I1205 12:58:44.205094 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4647929-264c-4fe3-b2ee-f543c25a50d0" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 05 12:58:44 crc kubenswrapper[4784]: I1205 12:58:44.205398 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4647929-264c-4fe3-b2ee-f543c25a50d0" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 05 12:58:44 crc kubenswrapper[4784]: I1205 12:58:44.206508 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-kphz8" Dec 05 12:58:44 crc kubenswrapper[4784]: I1205 12:58:44.208337 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 12:58:44 crc kubenswrapper[4784]: I1205 12:58:44.209088 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 12:58:44 crc kubenswrapper[4784]: I1205 12:58:44.209342 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 12:58:44 crc kubenswrapper[4784]: I1205 12:58:44.213553 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-lwfzg" Dec 05 12:58:44 crc kubenswrapper[4784]: I1205 12:58:44.217477 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-kphz8"] Dec 05 12:58:44 crc kubenswrapper[4784]: I1205 12:58:44.367032 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/db4f2cc1-d1a2-42af-a45b-04e866b92d97-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-kphz8\" (UID: \"db4f2cc1-d1a2-42af-a45b-04e866b92d97\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-kphz8" Dec 05 12:58:44 crc kubenswrapper[4784]: I1205 12:58:44.367083 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zrzpv\" (UniqueName: \"kubernetes.io/projected/db4f2cc1-d1a2-42af-a45b-04e866b92d97-kube-api-access-zrzpv\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-kphz8\" (UID: \"db4f2cc1-d1a2-42af-a45b-04e866b92d97\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-kphz8" Dec 05 12:58:44 crc kubenswrapper[4784]: I1205 12:58:44.367472 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/db4f2cc1-d1a2-42af-a45b-04e866b92d97-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-kphz8\" (UID: \"db4f2cc1-d1a2-42af-a45b-04e866b92d97\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-kphz8" Dec 05 12:58:44 crc kubenswrapper[4784]: I1205 12:58:44.469029 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/db4f2cc1-d1a2-42af-a45b-04e866b92d97-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-kphz8\" (UID: \"db4f2cc1-d1a2-42af-a45b-04e866b92d97\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-kphz8" Dec 05 12:58:44 crc kubenswrapper[4784]: I1205 12:58:44.469143 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/db4f2cc1-d1a2-42af-a45b-04e866b92d97-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-kphz8\" (UID: \"db4f2cc1-d1a2-42af-a45b-04e866b92d97\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-kphz8" Dec 05 12:58:44 crc kubenswrapper[4784]: I1205 12:58:44.469175 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zrzpv\" (UniqueName: \"kubernetes.io/projected/db4f2cc1-d1a2-42af-a45b-04e866b92d97-kube-api-access-zrzpv\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-kphz8\" (UID: 
\"db4f2cc1-d1a2-42af-a45b-04e866b92d97\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-kphz8" Dec 05 12:58:44 crc kubenswrapper[4784]: I1205 12:58:44.474577 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/db4f2cc1-d1a2-42af-a45b-04e866b92d97-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-kphz8\" (UID: \"db4f2cc1-d1a2-42af-a45b-04e866b92d97\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-kphz8" Dec 05 12:58:44 crc kubenswrapper[4784]: I1205 12:58:44.475237 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/db4f2cc1-d1a2-42af-a45b-04e866b92d97-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-kphz8\" (UID: \"db4f2cc1-d1a2-42af-a45b-04e866b92d97\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-kphz8" Dec 05 12:58:44 crc kubenswrapper[4784]: I1205 12:58:44.495178 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zrzpv\" (UniqueName: \"kubernetes.io/projected/db4f2cc1-d1a2-42af-a45b-04e866b92d97-kube-api-access-zrzpv\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-kphz8\" (UID: \"db4f2cc1-d1a2-42af-a45b-04e866b92d97\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-kphz8" Dec 05 12:58:44 crc kubenswrapper[4784]: I1205 12:58:44.522798 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-kphz8" Dec 05 12:58:44 crc kubenswrapper[4784]: I1205 12:58:44.926063 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-kphz8"] Dec 05 12:58:45 crc kubenswrapper[4784]: I1205 12:58:45.137418 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-kphz8" event={"ID":"db4f2cc1-d1a2-42af-a45b-04e866b92d97","Type":"ContainerStarted","Data":"016d7928824a3d1896fad2fd26734c4330478aaa7ad4f8c96b7a67cbd38bb1d9"} Dec 05 12:58:46 crc kubenswrapper[4784]: I1205 12:58:46.152472 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-kphz8" event={"ID":"db4f2cc1-d1a2-42af-a45b-04e866b92d97","Type":"ContainerStarted","Data":"b5ac25f8f137b7d1e12e3e429c36efac9543056d56d3351b73086403ff7c0063"} Dec 05 12:58:46 crc kubenswrapper[4784]: I1205 12:58:46.178870 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-kphz8" podStartSLOduration=1.771126038 podStartE2EDuration="2.178845542s" podCreationTimestamp="2025-12-05 12:58:44 +0000 UTC" firstStartedPulling="2025-12-05 12:58:44.933747755 +0000 UTC m=+2004.353814610" lastFinishedPulling="2025-12-05 12:58:45.341467299 +0000 UTC m=+2004.761534114" observedRunningTime="2025-12-05 12:58:46.166785614 +0000 UTC m=+2005.586852449" watchObservedRunningTime="2025-12-05 12:58:46.178845542 +0000 UTC m=+2005.598912357" Dec 05 12:58:59 crc kubenswrapper[4784]: I1205 12:58:59.572126 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:58:59 crc kubenswrapper[4784]: I1205 12:58:59.572819 4784 prober.go:107] "Probe 
failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:59:05 crc kubenswrapper[4784]: I1205 12:59:05.219242 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-kzvtd"] Dec 05 12:59:05 crc kubenswrapper[4784]: I1205 12:59:05.222485 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kzvtd" Dec 05 12:59:05 crc kubenswrapper[4784]: I1205 12:59:05.252300 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kzvtd"] Dec 05 12:59:05 crc kubenswrapper[4784]: I1205 12:59:05.279534 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4bef3c49-f297-4351-b4b0-024d267743ef-utilities\") pod \"redhat-marketplace-kzvtd\" (UID: \"4bef3c49-f297-4351-b4b0-024d267743ef\") " pod="openshift-marketplace/redhat-marketplace-kzvtd" Dec 05 12:59:05 crc kubenswrapper[4784]: I1205 12:59:05.279604 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4bef3c49-f297-4351-b4b0-024d267743ef-catalog-content\") pod \"redhat-marketplace-kzvtd\" (UID: \"4bef3c49-f297-4351-b4b0-024d267743ef\") " pod="openshift-marketplace/redhat-marketplace-kzvtd" Dec 05 12:59:05 crc kubenswrapper[4784]: I1205 12:59:05.279630 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bdfpv\" (UniqueName: \"kubernetes.io/projected/4bef3c49-f297-4351-b4b0-024d267743ef-kube-api-access-bdfpv\") pod \"redhat-marketplace-kzvtd\" (UID: \"4bef3c49-f297-4351-b4b0-024d267743ef\") " pod="openshift-marketplace/redhat-marketplace-kzvtd" Dec 05 12:59:05 crc kubenswrapper[4784]: I1205 12:59:05.382369 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4bef3c49-f297-4351-b4b0-024d267743ef-catalog-content\") pod \"redhat-marketplace-kzvtd\" (UID: \"4bef3c49-f297-4351-b4b0-024d267743ef\") " pod="openshift-marketplace/redhat-marketplace-kzvtd" Dec 05 12:59:05 crc kubenswrapper[4784]: I1205 12:59:05.382519 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bdfpv\" (UniqueName: \"kubernetes.io/projected/4bef3c49-f297-4351-b4b0-024d267743ef-kube-api-access-bdfpv\") pod \"redhat-marketplace-kzvtd\" (UID: \"4bef3c49-f297-4351-b4b0-024d267743ef\") " pod="openshift-marketplace/redhat-marketplace-kzvtd" Dec 05 12:59:05 crc kubenswrapper[4784]: I1205 12:59:05.383153 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4bef3c49-f297-4351-b4b0-024d267743ef-utilities\") pod \"redhat-marketplace-kzvtd\" (UID: \"4bef3c49-f297-4351-b4b0-024d267743ef\") " pod="openshift-marketplace/redhat-marketplace-kzvtd" Dec 05 12:59:05 crc kubenswrapper[4784]: I1205 12:59:05.384043 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4bef3c49-f297-4351-b4b0-024d267743ef-utilities\") pod \"redhat-marketplace-kzvtd\" (UID: 
\"4bef3c49-f297-4351-b4b0-024d267743ef\") " pod="openshift-marketplace/redhat-marketplace-kzvtd" Dec 05 12:59:05 crc kubenswrapper[4784]: I1205 12:59:05.385103 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4bef3c49-f297-4351-b4b0-024d267743ef-catalog-content\") pod \"redhat-marketplace-kzvtd\" (UID: \"4bef3c49-f297-4351-b4b0-024d267743ef\") " pod="openshift-marketplace/redhat-marketplace-kzvtd" Dec 05 12:59:05 crc kubenswrapper[4784]: I1205 12:59:05.427233 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bdfpv\" (UniqueName: \"kubernetes.io/projected/4bef3c49-f297-4351-b4b0-024d267743ef-kube-api-access-bdfpv\") pod \"redhat-marketplace-kzvtd\" (UID: \"4bef3c49-f297-4351-b4b0-024d267743ef\") " pod="openshift-marketplace/redhat-marketplace-kzvtd" Dec 05 12:59:05 crc kubenswrapper[4784]: I1205 12:59:05.548603 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kzvtd" Dec 05 12:59:06 crc kubenswrapper[4784]: I1205 12:59:06.097586 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kzvtd"] Dec 05 12:59:06 crc kubenswrapper[4784]: I1205 12:59:06.347290 4784 generic.go:334] "Generic (PLEG): container finished" podID="4bef3c49-f297-4351-b4b0-024d267743ef" containerID="f3147598d8c1fd1b7b8cf93bdcce96cae5fb43e4b417176ed028d12a5ed79d46" exitCode=0 Dec 05 12:59:06 crc kubenswrapper[4784]: I1205 12:59:06.347385 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kzvtd" event={"ID":"4bef3c49-f297-4351-b4b0-024d267743ef","Type":"ContainerDied","Data":"f3147598d8c1fd1b7b8cf93bdcce96cae5fb43e4b417176ed028d12a5ed79d46"} Dec 05 12:59:06 crc kubenswrapper[4784]: I1205 12:59:06.347597 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kzvtd" event={"ID":"4bef3c49-f297-4351-b4b0-024d267743ef","Type":"ContainerStarted","Data":"bac99df56770b8cf62ebcfcca60fbc0589186f302dcaabec0386dedb0cfba909"} Dec 05 12:59:07 crc kubenswrapper[4784]: I1205 12:59:07.361243 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kzvtd" event={"ID":"4bef3c49-f297-4351-b4b0-024d267743ef","Type":"ContainerStarted","Data":"c399b15ca58526dfdc6384ca3dfd8af38ad3331d91149faf99227f55d4a7932c"} Dec 05 12:59:08 crc kubenswrapper[4784]: I1205 12:59:08.375720 4784 generic.go:334] "Generic (PLEG): container finished" podID="4bef3c49-f297-4351-b4b0-024d267743ef" containerID="c399b15ca58526dfdc6384ca3dfd8af38ad3331d91149faf99227f55d4a7932c" exitCode=0 Dec 05 12:59:08 crc kubenswrapper[4784]: I1205 12:59:08.375770 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kzvtd" event={"ID":"4bef3c49-f297-4351-b4b0-024d267743ef","Type":"ContainerDied","Data":"c399b15ca58526dfdc6384ca3dfd8af38ad3331d91149faf99227f55d4a7932c"} Dec 05 12:59:09 crc kubenswrapper[4784]: I1205 12:59:09.386073 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kzvtd" event={"ID":"4bef3c49-f297-4351-b4b0-024d267743ef","Type":"ContainerStarted","Data":"9985adbeac659aabae9543c4100c9ffc6651e0a29e8a53537701445a59198dac"} Dec 05 12:59:09 crc kubenswrapper[4784]: I1205 12:59:09.411735 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/redhat-marketplace-kzvtd" podStartSLOduration=1.8476445460000002 podStartE2EDuration="4.411712023s" podCreationTimestamp="2025-12-05 12:59:05 +0000 UTC" firstStartedPulling="2025-12-05 12:59:06.349360906 +0000 UTC m=+2025.769427721" lastFinishedPulling="2025-12-05 12:59:08.913428373 +0000 UTC m=+2028.333495198" observedRunningTime="2025-12-05 12:59:09.403288419 +0000 UTC m=+2028.823355244" watchObservedRunningTime="2025-12-05 12:59:09.411712023 +0000 UTC m=+2028.831778838" Dec 05 12:59:10 crc kubenswrapper[4784]: I1205 12:59:10.037818 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-lqbxc"] Dec 05 12:59:10 crc kubenswrapper[4784]: I1205 12:59:10.046537 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-lqbxc"] Dec 05 12:59:11 crc kubenswrapper[4784]: I1205 12:59:11.010870 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c32bb62c-16f9-429c-a3b8-1edf96d0261c" path="/var/lib/kubelet/pods/c32bb62c-16f9-429c-a3b8-1edf96d0261c/volumes" Dec 05 12:59:15 crc kubenswrapper[4784]: I1205 12:59:15.549279 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-kzvtd" Dec 05 12:59:15 crc kubenswrapper[4784]: I1205 12:59:15.550095 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-kzvtd" Dec 05 12:59:15 crc kubenswrapper[4784]: I1205 12:59:15.614952 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-kzvtd" Dec 05 12:59:16 crc kubenswrapper[4784]: I1205 12:59:16.565127 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-kzvtd" Dec 05 12:59:16 crc kubenswrapper[4784]: I1205 12:59:16.625468 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kzvtd"] Dec 05 12:59:18 crc kubenswrapper[4784]: I1205 12:59:18.498523 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-kzvtd" podUID="4bef3c49-f297-4351-b4b0-024d267743ef" containerName="registry-server" containerID="cri-o://9985adbeac659aabae9543c4100c9ffc6651e0a29e8a53537701445a59198dac" gracePeriod=2 Dec 05 12:59:18 crc kubenswrapper[4784]: I1205 12:59:18.979934 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kzvtd" Dec 05 12:59:19 crc kubenswrapper[4784]: I1205 12:59:19.080876 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4bef3c49-f297-4351-b4b0-024d267743ef-catalog-content\") pod \"4bef3c49-f297-4351-b4b0-024d267743ef\" (UID: \"4bef3c49-f297-4351-b4b0-024d267743ef\") " Dec 05 12:59:19 crc kubenswrapper[4784]: I1205 12:59:19.081047 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4bef3c49-f297-4351-b4b0-024d267743ef-utilities\") pod \"4bef3c49-f297-4351-b4b0-024d267743ef\" (UID: \"4bef3c49-f297-4351-b4b0-024d267743ef\") " Dec 05 12:59:19 crc kubenswrapper[4784]: I1205 12:59:19.081149 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bdfpv\" (UniqueName: \"kubernetes.io/projected/4bef3c49-f297-4351-b4b0-024d267743ef-kube-api-access-bdfpv\") pod \"4bef3c49-f297-4351-b4b0-024d267743ef\" (UID: \"4bef3c49-f297-4351-b4b0-024d267743ef\") " Dec 05 12:59:19 crc kubenswrapper[4784]: I1205 12:59:19.082516 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4bef3c49-f297-4351-b4b0-024d267743ef-utilities" (OuterVolumeSpecName: "utilities") pod "4bef3c49-f297-4351-b4b0-024d267743ef" (UID: "4bef3c49-f297-4351-b4b0-024d267743ef"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:59:19 crc kubenswrapper[4784]: I1205 12:59:19.088789 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bef3c49-f297-4351-b4b0-024d267743ef-kube-api-access-bdfpv" (OuterVolumeSpecName: "kube-api-access-bdfpv") pod "4bef3c49-f297-4351-b4b0-024d267743ef" (UID: "4bef3c49-f297-4351-b4b0-024d267743ef"). InnerVolumeSpecName "kube-api-access-bdfpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:59:19 crc kubenswrapper[4784]: I1205 12:59:19.107179 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4bef3c49-f297-4351-b4b0-024d267743ef-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4bef3c49-f297-4351-b4b0-024d267743ef" (UID: "4bef3c49-f297-4351-b4b0-024d267743ef"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:59:19 crc kubenswrapper[4784]: I1205 12:59:19.183490 4784 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4bef3c49-f297-4351-b4b0-024d267743ef-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:59:19 crc kubenswrapper[4784]: I1205 12:59:19.183980 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bdfpv\" (UniqueName: \"kubernetes.io/projected/4bef3c49-f297-4351-b4b0-024d267743ef-kube-api-access-bdfpv\") on node \"crc\" DevicePath \"\"" Dec 05 12:59:19 crc kubenswrapper[4784]: I1205 12:59:19.184046 4784 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4bef3c49-f297-4351-b4b0-024d267743ef-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:59:19 crc kubenswrapper[4784]: I1205 12:59:19.512097 4784 generic.go:334] "Generic (PLEG): container finished" podID="4bef3c49-f297-4351-b4b0-024d267743ef" containerID="9985adbeac659aabae9543c4100c9ffc6651e0a29e8a53537701445a59198dac" exitCode=0 Dec 05 12:59:19 crc kubenswrapper[4784]: I1205 12:59:19.512143 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kzvtd" event={"ID":"4bef3c49-f297-4351-b4b0-024d267743ef","Type":"ContainerDied","Data":"9985adbeac659aabae9543c4100c9ffc6651e0a29e8a53537701445a59198dac"} Dec 05 12:59:19 crc kubenswrapper[4784]: I1205 12:59:19.512177 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kzvtd" event={"ID":"4bef3c49-f297-4351-b4b0-024d267743ef","Type":"ContainerDied","Data":"bac99df56770b8cf62ebcfcca60fbc0589186f302dcaabec0386dedb0cfba909"} Dec 05 12:59:19 crc kubenswrapper[4784]: I1205 12:59:19.512283 4784 scope.go:117] "RemoveContainer" containerID="9985adbeac659aabae9543c4100c9ffc6651e0a29e8a53537701445a59198dac" Dec 05 12:59:19 crc kubenswrapper[4784]: I1205 12:59:19.512444 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kzvtd" Dec 05 12:59:19 crc kubenswrapper[4784]: I1205 12:59:19.545501 4784 scope.go:117] "RemoveContainer" containerID="c399b15ca58526dfdc6384ca3dfd8af38ad3331d91149faf99227f55d4a7932c" Dec 05 12:59:19 crc kubenswrapper[4784]: I1205 12:59:19.553409 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kzvtd"] Dec 05 12:59:19 crc kubenswrapper[4784]: I1205 12:59:19.563036 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-kzvtd"] Dec 05 12:59:19 crc kubenswrapper[4784]: I1205 12:59:19.581108 4784 scope.go:117] "RemoveContainer" containerID="f3147598d8c1fd1b7b8cf93bdcce96cae5fb43e4b417176ed028d12a5ed79d46" Dec 05 12:59:19 crc kubenswrapper[4784]: I1205 12:59:19.617390 4784 scope.go:117] "RemoveContainer" containerID="9985adbeac659aabae9543c4100c9ffc6651e0a29e8a53537701445a59198dac" Dec 05 12:59:19 crc kubenswrapper[4784]: E1205 12:59:19.617861 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9985adbeac659aabae9543c4100c9ffc6651e0a29e8a53537701445a59198dac\": container with ID starting with 9985adbeac659aabae9543c4100c9ffc6651e0a29e8a53537701445a59198dac not found: ID does not exist" containerID="9985adbeac659aabae9543c4100c9ffc6651e0a29e8a53537701445a59198dac" Dec 05 12:59:19 crc kubenswrapper[4784]: I1205 12:59:19.617909 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9985adbeac659aabae9543c4100c9ffc6651e0a29e8a53537701445a59198dac"} err="failed to get container status \"9985adbeac659aabae9543c4100c9ffc6651e0a29e8a53537701445a59198dac\": rpc error: code = NotFound desc = could not find container \"9985adbeac659aabae9543c4100c9ffc6651e0a29e8a53537701445a59198dac\": container with ID starting with 9985adbeac659aabae9543c4100c9ffc6651e0a29e8a53537701445a59198dac not found: ID does not exist" Dec 05 12:59:19 crc kubenswrapper[4784]: I1205 12:59:19.617939 4784 scope.go:117] "RemoveContainer" containerID="c399b15ca58526dfdc6384ca3dfd8af38ad3331d91149faf99227f55d4a7932c" Dec 05 12:59:19 crc kubenswrapper[4784]: E1205 12:59:19.618255 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c399b15ca58526dfdc6384ca3dfd8af38ad3331d91149faf99227f55d4a7932c\": container with ID starting with c399b15ca58526dfdc6384ca3dfd8af38ad3331d91149faf99227f55d4a7932c not found: ID does not exist" containerID="c399b15ca58526dfdc6384ca3dfd8af38ad3331d91149faf99227f55d4a7932c" Dec 05 12:59:19 crc kubenswrapper[4784]: I1205 12:59:19.618287 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c399b15ca58526dfdc6384ca3dfd8af38ad3331d91149faf99227f55d4a7932c"} err="failed to get container status \"c399b15ca58526dfdc6384ca3dfd8af38ad3331d91149faf99227f55d4a7932c\": rpc error: code = NotFound desc = could not find container \"c399b15ca58526dfdc6384ca3dfd8af38ad3331d91149faf99227f55d4a7932c\": container with ID starting with c399b15ca58526dfdc6384ca3dfd8af38ad3331d91149faf99227f55d4a7932c not found: ID does not exist" Dec 05 12:59:19 crc kubenswrapper[4784]: I1205 12:59:19.618300 4784 scope.go:117] "RemoveContainer" containerID="f3147598d8c1fd1b7b8cf93bdcce96cae5fb43e4b417176ed028d12a5ed79d46" Dec 05 12:59:19 crc kubenswrapper[4784]: E1205 12:59:19.618523 4784 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"f3147598d8c1fd1b7b8cf93bdcce96cae5fb43e4b417176ed028d12a5ed79d46\": container with ID starting with f3147598d8c1fd1b7b8cf93bdcce96cae5fb43e4b417176ed028d12a5ed79d46 not found: ID does not exist" containerID="f3147598d8c1fd1b7b8cf93bdcce96cae5fb43e4b417176ed028d12a5ed79d46" Dec 05 12:59:19 crc kubenswrapper[4784]: I1205 12:59:19.618547 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f3147598d8c1fd1b7b8cf93bdcce96cae5fb43e4b417176ed028d12a5ed79d46"} err="failed to get container status \"f3147598d8c1fd1b7b8cf93bdcce96cae5fb43e4b417176ed028d12a5ed79d46\": rpc error: code = NotFound desc = could not find container \"f3147598d8c1fd1b7b8cf93bdcce96cae5fb43e4b417176ed028d12a5ed79d46\": container with ID starting with f3147598d8c1fd1b7b8cf93bdcce96cae5fb43e4b417176ed028d12a5ed79d46 not found: ID does not exist" Dec 05 12:59:21 crc kubenswrapper[4784]: I1205 12:59:21.042743 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bef3c49-f297-4351-b4b0-024d267743ef" path="/var/lib/kubelet/pods/4bef3c49-f297-4351-b4b0-024d267743ef/volumes" Dec 05 12:59:21 crc kubenswrapper[4784]: I1205 12:59:21.271627 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-fx86k"] Dec 05 12:59:21 crc kubenswrapper[4784]: E1205 12:59:21.272158 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4bef3c49-f297-4351-b4b0-024d267743ef" containerName="registry-server" Dec 05 12:59:21 crc kubenswrapper[4784]: I1205 12:59:21.272179 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="4bef3c49-f297-4351-b4b0-024d267743ef" containerName="registry-server" Dec 05 12:59:21 crc kubenswrapper[4784]: E1205 12:59:21.272245 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4bef3c49-f297-4351-b4b0-024d267743ef" containerName="extract-content" Dec 05 12:59:21 crc kubenswrapper[4784]: I1205 12:59:21.272254 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="4bef3c49-f297-4351-b4b0-024d267743ef" containerName="extract-content" Dec 05 12:59:21 crc kubenswrapper[4784]: E1205 12:59:21.272268 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4bef3c49-f297-4351-b4b0-024d267743ef" containerName="extract-utilities" Dec 05 12:59:21 crc kubenswrapper[4784]: I1205 12:59:21.272276 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="4bef3c49-f297-4351-b4b0-024d267743ef" containerName="extract-utilities" Dec 05 12:59:21 crc kubenswrapper[4784]: I1205 12:59:21.272537 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="4bef3c49-f297-4351-b4b0-024d267743ef" containerName="registry-server" Dec 05 12:59:21 crc kubenswrapper[4784]: I1205 12:59:21.274438 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-fx86k" Dec 05 12:59:21 crc kubenswrapper[4784]: I1205 12:59:21.289475 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fx86k"] Dec 05 12:59:21 crc kubenswrapper[4784]: I1205 12:59:21.338094 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d50f6999-3630-4528-98bb-5cad3a96cec5-utilities\") pod \"community-operators-fx86k\" (UID: \"d50f6999-3630-4528-98bb-5cad3a96cec5\") " pod="openshift-marketplace/community-operators-fx86k" Dec 05 12:59:21 crc kubenswrapper[4784]: I1205 12:59:21.338717 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d50f6999-3630-4528-98bb-5cad3a96cec5-catalog-content\") pod \"community-operators-fx86k\" (UID: \"d50f6999-3630-4528-98bb-5cad3a96cec5\") " pod="openshift-marketplace/community-operators-fx86k" Dec 05 12:59:21 crc kubenswrapper[4784]: I1205 12:59:21.338775 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7jsff\" (UniqueName: \"kubernetes.io/projected/d50f6999-3630-4528-98bb-5cad3a96cec5-kube-api-access-7jsff\") pod \"community-operators-fx86k\" (UID: \"d50f6999-3630-4528-98bb-5cad3a96cec5\") " pod="openshift-marketplace/community-operators-fx86k" Dec 05 12:59:21 crc kubenswrapper[4784]: I1205 12:59:21.439717 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d50f6999-3630-4528-98bb-5cad3a96cec5-utilities\") pod \"community-operators-fx86k\" (UID: \"d50f6999-3630-4528-98bb-5cad3a96cec5\") " pod="openshift-marketplace/community-operators-fx86k" Dec 05 12:59:21 crc kubenswrapper[4784]: I1205 12:59:21.439862 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d50f6999-3630-4528-98bb-5cad3a96cec5-catalog-content\") pod \"community-operators-fx86k\" (UID: \"d50f6999-3630-4528-98bb-5cad3a96cec5\") " pod="openshift-marketplace/community-operators-fx86k" Dec 05 12:59:21 crc kubenswrapper[4784]: I1205 12:59:21.439883 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7jsff\" (UniqueName: \"kubernetes.io/projected/d50f6999-3630-4528-98bb-5cad3a96cec5-kube-api-access-7jsff\") pod \"community-operators-fx86k\" (UID: \"d50f6999-3630-4528-98bb-5cad3a96cec5\") " pod="openshift-marketplace/community-operators-fx86k" Dec 05 12:59:21 crc kubenswrapper[4784]: I1205 12:59:21.440294 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d50f6999-3630-4528-98bb-5cad3a96cec5-utilities\") pod \"community-operators-fx86k\" (UID: \"d50f6999-3630-4528-98bb-5cad3a96cec5\") " pod="openshift-marketplace/community-operators-fx86k" Dec 05 12:59:21 crc kubenswrapper[4784]: I1205 12:59:21.440334 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d50f6999-3630-4528-98bb-5cad3a96cec5-catalog-content\") pod \"community-operators-fx86k\" (UID: \"d50f6999-3630-4528-98bb-5cad3a96cec5\") " pod="openshift-marketplace/community-operators-fx86k" Dec 05 12:59:21 crc kubenswrapper[4784]: I1205 12:59:21.459950 4784 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-7jsff\" (UniqueName: \"kubernetes.io/projected/d50f6999-3630-4528-98bb-5cad3a96cec5-kube-api-access-7jsff\") pod \"community-operators-fx86k\" (UID: \"d50f6999-3630-4528-98bb-5cad3a96cec5\") " pod="openshift-marketplace/community-operators-fx86k" Dec 05 12:59:21 crc kubenswrapper[4784]: I1205 12:59:21.617244 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fx86k" Dec 05 12:59:22 crc kubenswrapper[4784]: I1205 12:59:22.191280 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fx86k"] Dec 05 12:59:22 crc kubenswrapper[4784]: I1205 12:59:22.549318 4784 generic.go:334] "Generic (PLEG): container finished" podID="d50f6999-3630-4528-98bb-5cad3a96cec5" containerID="6f60bb77fa73170ea084049af2c27cc2cf0cace77fb57a956c78950daed296b2" exitCode=0 Dec 05 12:59:22 crc kubenswrapper[4784]: I1205 12:59:22.549597 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fx86k" event={"ID":"d50f6999-3630-4528-98bb-5cad3a96cec5","Type":"ContainerDied","Data":"6f60bb77fa73170ea084049af2c27cc2cf0cace77fb57a956c78950daed296b2"} Dec 05 12:59:22 crc kubenswrapper[4784]: I1205 12:59:22.549623 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fx86k" event={"ID":"d50f6999-3630-4528-98bb-5cad3a96cec5","Type":"ContainerStarted","Data":"79c04a133b256f98e0200c691631034cfbb3efed6ecbec5b1366f36bfa4fc0d2"} Dec 05 12:59:23 crc kubenswrapper[4784]: I1205 12:59:23.562502 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fx86k" event={"ID":"d50f6999-3630-4528-98bb-5cad3a96cec5","Type":"ContainerStarted","Data":"bbf9a29b118d5b09973454e58d3f1869aa1b7d191c4206ee6f3956d573001c7a"} Dec 05 12:59:24 crc kubenswrapper[4784]: I1205 12:59:24.575334 4784 generic.go:334] "Generic (PLEG): container finished" podID="d50f6999-3630-4528-98bb-5cad3a96cec5" containerID="bbf9a29b118d5b09973454e58d3f1869aa1b7d191c4206ee6f3956d573001c7a" exitCode=0 Dec 05 12:59:24 crc kubenswrapper[4784]: I1205 12:59:24.575381 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fx86k" event={"ID":"d50f6999-3630-4528-98bb-5cad3a96cec5","Type":"ContainerDied","Data":"bbf9a29b118d5b09973454e58d3f1869aa1b7d191c4206ee6f3956d573001c7a"} Dec 05 12:59:25 crc kubenswrapper[4784]: I1205 12:59:25.590130 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fx86k" event={"ID":"d50f6999-3630-4528-98bb-5cad3a96cec5","Type":"ContainerStarted","Data":"e6ea5718a6972ab68d6effffe4222880a4d5b305c140f27ec9095eef447ae4a6"} Dec 05 12:59:25 crc kubenswrapper[4784]: I1205 12:59:25.617591 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-fx86k" podStartSLOduration=2.175561513 podStartE2EDuration="4.617551794s" podCreationTimestamp="2025-12-05 12:59:21 +0000 UTC" firstStartedPulling="2025-12-05 12:59:22.551287196 +0000 UTC m=+2041.971354011" lastFinishedPulling="2025-12-05 12:59:24.993277437 +0000 UTC m=+2044.413344292" observedRunningTime="2025-12-05 12:59:25.607276233 +0000 UTC m=+2045.027343098" watchObservedRunningTime="2025-12-05 12:59:25.617551794 +0000 UTC m=+2045.037618609" Dec 05 12:59:27 crc kubenswrapper[4784]: I1205 12:59:27.622105 4784 generic.go:334] "Generic (PLEG): container finished" 
podID="db4f2cc1-d1a2-42af-a45b-04e866b92d97" containerID="b5ac25f8f137b7d1e12e3e429c36efac9543056d56d3351b73086403ff7c0063" exitCode=0 Dec 05 12:59:27 crc kubenswrapper[4784]: I1205 12:59:27.622268 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-kphz8" event={"ID":"db4f2cc1-d1a2-42af-a45b-04e866b92d97","Type":"ContainerDied","Data":"b5ac25f8f137b7d1e12e3e429c36efac9543056d56d3351b73086403ff7c0063"} Dec 05 12:59:29 crc kubenswrapper[4784]: I1205 12:59:29.125817 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-kphz8" Dec 05 12:59:29 crc kubenswrapper[4784]: I1205 12:59:29.210730 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/db4f2cc1-d1a2-42af-a45b-04e866b92d97-ssh-key\") pod \"db4f2cc1-d1a2-42af-a45b-04e866b92d97\" (UID: \"db4f2cc1-d1a2-42af-a45b-04e866b92d97\") " Dec 05 12:59:29 crc kubenswrapper[4784]: I1205 12:59:29.210831 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/db4f2cc1-d1a2-42af-a45b-04e866b92d97-inventory\") pod \"db4f2cc1-d1a2-42af-a45b-04e866b92d97\" (UID: \"db4f2cc1-d1a2-42af-a45b-04e866b92d97\") " Dec 05 12:59:29 crc kubenswrapper[4784]: I1205 12:59:29.242940 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db4f2cc1-d1a2-42af-a45b-04e866b92d97-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "db4f2cc1-d1a2-42af-a45b-04e866b92d97" (UID: "db4f2cc1-d1a2-42af-a45b-04e866b92d97"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:59:29 crc kubenswrapper[4784]: I1205 12:59:29.245104 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db4f2cc1-d1a2-42af-a45b-04e866b92d97-inventory" (OuterVolumeSpecName: "inventory") pod "db4f2cc1-d1a2-42af-a45b-04e866b92d97" (UID: "db4f2cc1-d1a2-42af-a45b-04e866b92d97"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 12:59:29 crc kubenswrapper[4784]: I1205 12:59:29.313479 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zrzpv\" (UniqueName: \"kubernetes.io/projected/db4f2cc1-d1a2-42af-a45b-04e866b92d97-kube-api-access-zrzpv\") pod \"db4f2cc1-d1a2-42af-a45b-04e866b92d97\" (UID: \"db4f2cc1-d1a2-42af-a45b-04e866b92d97\") " Dec 05 12:59:29 crc kubenswrapper[4784]: I1205 12:59:29.313858 4784 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/db4f2cc1-d1a2-42af-a45b-04e866b92d97-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 12:59:29 crc kubenswrapper[4784]: I1205 12:59:29.313879 4784 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/db4f2cc1-d1a2-42af-a45b-04e866b92d97-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 12:59:29 crc kubenswrapper[4784]: I1205 12:59:29.318324 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db4f2cc1-d1a2-42af-a45b-04e866b92d97-kube-api-access-zrzpv" (OuterVolumeSpecName: "kube-api-access-zrzpv") pod "db4f2cc1-d1a2-42af-a45b-04e866b92d97" (UID: "db4f2cc1-d1a2-42af-a45b-04e866b92d97"). InnerVolumeSpecName "kube-api-access-zrzpv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:59:29 crc kubenswrapper[4784]: I1205 12:59:29.415590 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zrzpv\" (UniqueName: \"kubernetes.io/projected/db4f2cc1-d1a2-42af-a45b-04e866b92d97-kube-api-access-zrzpv\") on node \"crc\" DevicePath \"\"" Dec 05 12:59:29 crc kubenswrapper[4784]: I1205 12:59:29.572578 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 12:59:29 crc kubenswrapper[4784]: I1205 12:59:29.572636 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 12:59:29 crc kubenswrapper[4784]: I1205 12:59:29.572685 4784 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" Dec 05 12:59:29 crc kubenswrapper[4784]: I1205 12:59:29.573514 4784 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"530d1b87a942e0fe217bf312e1b52cd3f9bc07f40f420aae148e7f7c0b99a93a"} pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 12:59:29 crc kubenswrapper[4784]: I1205 12:59:29.573580 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" containerID="cri-o://530d1b87a942e0fe217bf312e1b52cd3f9bc07f40f420aae148e7f7c0b99a93a" gracePeriod=600 Dec 05 12:59:29 crc kubenswrapper[4784]: I1205 12:59:29.647010 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-kphz8" event={"ID":"db4f2cc1-d1a2-42af-a45b-04e866b92d97","Type":"ContainerDied","Data":"016d7928824a3d1896fad2fd26734c4330478aaa7ad4f8c96b7a67cbd38bb1d9"} Dec 05 12:59:29 crc kubenswrapper[4784]: I1205 12:59:29.647344 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-kphz8" Dec 05 12:59:29 crc kubenswrapper[4784]: I1205 12:59:29.647337 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="016d7928824a3d1896fad2fd26734c4330478aaa7ad4f8c96b7a67cbd38bb1d9" Dec 05 12:59:29 crc kubenswrapper[4784]: I1205 12:59:29.746524 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-m5gm5"] Dec 05 12:59:29 crc kubenswrapper[4784]: E1205 12:59:29.747002 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db4f2cc1-d1a2-42af-a45b-04e866b92d97" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 05 12:59:29 crc kubenswrapper[4784]: I1205 12:59:29.747020 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="db4f2cc1-d1a2-42af-a45b-04e866b92d97" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 05 12:59:29 crc kubenswrapper[4784]: I1205 12:59:29.747237 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="db4f2cc1-d1a2-42af-a45b-04e866b92d97" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 05 12:59:29 crc kubenswrapper[4784]: I1205 12:59:29.748140 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-m5gm5" Dec 05 12:59:29 crc kubenswrapper[4784]: I1205 12:59:29.752262 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 12:59:29 crc kubenswrapper[4784]: I1205 12:59:29.752936 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-lwfzg" Dec 05 12:59:29 crc kubenswrapper[4784]: I1205 12:59:29.753086 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 12:59:29 crc kubenswrapper[4784]: I1205 12:59:29.753656 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 12:59:29 crc kubenswrapper[4784]: I1205 12:59:29.773241 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-m5gm5"] Dec 05 12:59:29 crc kubenswrapper[4784]: I1205 12:59:29.821575 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b0194359-b6ce-4590-b835-c81b0c992ca1-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-m5gm5\" (UID: \"b0194359-b6ce-4590-b835-c81b0c992ca1\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-m5gm5" Dec 05 12:59:29 crc kubenswrapper[4784]: I1205 12:59:29.821634 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b0194359-b6ce-4590-b835-c81b0c992ca1-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-m5gm5\" (UID: \"b0194359-b6ce-4590-b835-c81b0c992ca1\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-m5gm5" Dec 05 12:59:29 crc kubenswrapper[4784]: I1205 12:59:29.821676 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nbjm5\" (UniqueName: \"kubernetes.io/projected/b0194359-b6ce-4590-b835-c81b0c992ca1-kube-api-access-nbjm5\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-m5gm5\" 
(UID: \"b0194359-b6ce-4590-b835-c81b0c992ca1\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-m5gm5" Dec 05 12:59:29 crc kubenswrapper[4784]: I1205 12:59:29.923552 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nbjm5\" (UniqueName: \"kubernetes.io/projected/b0194359-b6ce-4590-b835-c81b0c992ca1-kube-api-access-nbjm5\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-m5gm5\" (UID: \"b0194359-b6ce-4590-b835-c81b0c992ca1\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-m5gm5" Dec 05 12:59:29 crc kubenswrapper[4784]: I1205 12:59:29.923792 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b0194359-b6ce-4590-b835-c81b0c992ca1-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-m5gm5\" (UID: \"b0194359-b6ce-4590-b835-c81b0c992ca1\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-m5gm5" Dec 05 12:59:29 crc kubenswrapper[4784]: I1205 12:59:29.923845 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b0194359-b6ce-4590-b835-c81b0c992ca1-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-m5gm5\" (UID: \"b0194359-b6ce-4590-b835-c81b0c992ca1\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-m5gm5" Dec 05 12:59:29 crc kubenswrapper[4784]: I1205 12:59:29.928807 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b0194359-b6ce-4590-b835-c81b0c992ca1-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-m5gm5\" (UID: \"b0194359-b6ce-4590-b835-c81b0c992ca1\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-m5gm5" Dec 05 12:59:29 crc kubenswrapper[4784]: I1205 12:59:29.928878 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b0194359-b6ce-4590-b835-c81b0c992ca1-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-m5gm5\" (UID: \"b0194359-b6ce-4590-b835-c81b0c992ca1\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-m5gm5" Dec 05 12:59:29 crc kubenswrapper[4784]: I1205 12:59:29.951452 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nbjm5\" (UniqueName: \"kubernetes.io/projected/b0194359-b6ce-4590-b835-c81b0c992ca1-kube-api-access-nbjm5\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-m5gm5\" (UID: \"b0194359-b6ce-4590-b835-c81b0c992ca1\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-m5gm5" Dec 05 12:59:30 crc kubenswrapper[4784]: I1205 12:59:30.078252 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-m5gm5" Dec 05 12:59:30 crc kubenswrapper[4784]: I1205 12:59:30.662787 4784 generic.go:334] "Generic (PLEG): container finished" podID="be412f31-7a36-4811-8914-be8cdc987d08" containerID="530d1b87a942e0fe217bf312e1b52cd3f9bc07f40f420aae148e7f7c0b99a93a" exitCode=0 Dec 05 12:59:30 crc kubenswrapper[4784]: I1205 12:59:30.663131 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerDied","Data":"530d1b87a942e0fe217bf312e1b52cd3f9bc07f40f420aae148e7f7c0b99a93a"} Dec 05 12:59:30 crc kubenswrapper[4784]: I1205 12:59:30.663174 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerStarted","Data":"cf372dd1dff8e9ccbb780396921dcc2542b196734ec18e5c9adbd311c71fa1a9"} Dec 05 12:59:30 crc kubenswrapper[4784]: I1205 12:59:30.663229 4784 scope.go:117] "RemoveContainer" containerID="1e04a48f7470e663550729d5a6189f2705f6151f92d045a29c6589f7e1ee84a9" Dec 05 12:59:30 crc kubenswrapper[4784]: I1205 12:59:30.732448 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-m5gm5"] Dec 05 12:59:31 crc kubenswrapper[4784]: I1205 12:59:31.618050 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-fx86k" Dec 05 12:59:31 crc kubenswrapper[4784]: I1205 12:59:31.618733 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-fx86k" Dec 05 12:59:31 crc kubenswrapper[4784]: I1205 12:59:31.677935 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-m5gm5" event={"ID":"b0194359-b6ce-4590-b835-c81b0c992ca1","Type":"ContainerStarted","Data":"a66b35ec710a33bee6fa51f44fc7cda12e3fac0c4bb6d8d4b0935c8134a2ea2e"} Dec 05 12:59:31 crc kubenswrapper[4784]: I1205 12:59:31.678885 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-m5gm5" event={"ID":"b0194359-b6ce-4590-b835-c81b0c992ca1","Type":"ContainerStarted","Data":"269692f7ecc152c873e2e15240c5749df0b43238208a37bc8044bf8b3ea48163"} Dec 05 12:59:31 crc kubenswrapper[4784]: I1205 12:59:31.685073 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-fx86k" Dec 05 12:59:31 crc kubenswrapper[4784]: I1205 12:59:31.700526 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-m5gm5" podStartSLOduration=2.305132423 podStartE2EDuration="2.700508449s" podCreationTimestamp="2025-12-05 12:59:29 +0000 UTC" firstStartedPulling="2025-12-05 12:59:30.738021857 +0000 UTC m=+2050.158088672" lastFinishedPulling="2025-12-05 12:59:31.133397873 +0000 UTC m=+2050.553464698" observedRunningTime="2025-12-05 12:59:31.694837722 +0000 UTC m=+2051.114904537" watchObservedRunningTime="2025-12-05 12:59:31.700508449 +0000 UTC m=+2051.120575264" Dec 05 12:59:31 crc kubenswrapper[4784]: I1205 12:59:31.760110 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-fx86k" Dec 05 12:59:31 crc kubenswrapper[4784]: I1205 12:59:31.942345 
4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fx86k"] Dec 05 12:59:33 crc kubenswrapper[4784]: I1205 12:59:33.697895 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-fx86k" podUID="d50f6999-3630-4528-98bb-5cad3a96cec5" containerName="registry-server" containerID="cri-o://e6ea5718a6972ab68d6effffe4222880a4d5b305c140f27ec9095eef447ae4a6" gracePeriod=2 Dec 05 12:59:34 crc kubenswrapper[4784]: I1205 12:59:34.158136 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fx86k" Dec 05 12:59:34 crc kubenswrapper[4784]: I1205 12:59:34.310629 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d50f6999-3630-4528-98bb-5cad3a96cec5-catalog-content\") pod \"d50f6999-3630-4528-98bb-5cad3a96cec5\" (UID: \"d50f6999-3630-4528-98bb-5cad3a96cec5\") " Dec 05 12:59:34 crc kubenswrapper[4784]: I1205 12:59:34.310694 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7jsff\" (UniqueName: \"kubernetes.io/projected/d50f6999-3630-4528-98bb-5cad3a96cec5-kube-api-access-7jsff\") pod \"d50f6999-3630-4528-98bb-5cad3a96cec5\" (UID: \"d50f6999-3630-4528-98bb-5cad3a96cec5\") " Dec 05 12:59:34 crc kubenswrapper[4784]: I1205 12:59:34.310887 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d50f6999-3630-4528-98bb-5cad3a96cec5-utilities\") pod \"d50f6999-3630-4528-98bb-5cad3a96cec5\" (UID: \"d50f6999-3630-4528-98bb-5cad3a96cec5\") " Dec 05 12:59:34 crc kubenswrapper[4784]: I1205 12:59:34.312575 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d50f6999-3630-4528-98bb-5cad3a96cec5-utilities" (OuterVolumeSpecName: "utilities") pod "d50f6999-3630-4528-98bb-5cad3a96cec5" (UID: "d50f6999-3630-4528-98bb-5cad3a96cec5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:59:34 crc kubenswrapper[4784]: I1205 12:59:34.317493 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d50f6999-3630-4528-98bb-5cad3a96cec5-kube-api-access-7jsff" (OuterVolumeSpecName: "kube-api-access-7jsff") pod "d50f6999-3630-4528-98bb-5cad3a96cec5" (UID: "d50f6999-3630-4528-98bb-5cad3a96cec5"). InnerVolumeSpecName "kube-api-access-7jsff". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 12:59:34 crc kubenswrapper[4784]: I1205 12:59:34.400965 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d50f6999-3630-4528-98bb-5cad3a96cec5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d50f6999-3630-4528-98bb-5cad3a96cec5" (UID: "d50f6999-3630-4528-98bb-5cad3a96cec5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 12:59:34 crc kubenswrapper[4784]: I1205 12:59:34.413370 4784 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d50f6999-3630-4528-98bb-5cad3a96cec5-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 12:59:34 crc kubenswrapper[4784]: I1205 12:59:34.413418 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7jsff\" (UniqueName: \"kubernetes.io/projected/d50f6999-3630-4528-98bb-5cad3a96cec5-kube-api-access-7jsff\") on node \"crc\" DevicePath \"\"" Dec 05 12:59:34 crc kubenswrapper[4784]: I1205 12:59:34.413436 4784 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d50f6999-3630-4528-98bb-5cad3a96cec5-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 12:59:34 crc kubenswrapper[4784]: I1205 12:59:34.728462 4784 generic.go:334] "Generic (PLEG): container finished" podID="d50f6999-3630-4528-98bb-5cad3a96cec5" containerID="e6ea5718a6972ab68d6effffe4222880a4d5b305c140f27ec9095eef447ae4a6" exitCode=0 Dec 05 12:59:34 crc kubenswrapper[4784]: I1205 12:59:34.728560 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fx86k" event={"ID":"d50f6999-3630-4528-98bb-5cad3a96cec5","Type":"ContainerDied","Data":"e6ea5718a6972ab68d6effffe4222880a4d5b305c140f27ec9095eef447ae4a6"} Dec 05 12:59:34 crc kubenswrapper[4784]: I1205 12:59:34.728816 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fx86k" event={"ID":"d50f6999-3630-4528-98bb-5cad3a96cec5","Type":"ContainerDied","Data":"79c04a133b256f98e0200c691631034cfbb3efed6ecbec5b1366f36bfa4fc0d2"} Dec 05 12:59:34 crc kubenswrapper[4784]: I1205 12:59:34.728592 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-fx86k" Dec 05 12:59:34 crc kubenswrapper[4784]: I1205 12:59:34.728915 4784 scope.go:117] "RemoveContainer" containerID="e6ea5718a6972ab68d6effffe4222880a4d5b305c140f27ec9095eef447ae4a6" Dec 05 12:59:34 crc kubenswrapper[4784]: I1205 12:59:34.769747 4784 scope.go:117] "RemoveContainer" containerID="bbf9a29b118d5b09973454e58d3f1869aa1b7d191c4206ee6f3956d573001c7a" Dec 05 12:59:34 crc kubenswrapper[4784]: I1205 12:59:34.776619 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fx86k"] Dec 05 12:59:34 crc kubenswrapper[4784]: I1205 12:59:34.784396 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-fx86k"] Dec 05 12:59:34 crc kubenswrapper[4784]: I1205 12:59:34.792304 4784 scope.go:117] "RemoveContainer" containerID="6f60bb77fa73170ea084049af2c27cc2cf0cace77fb57a956c78950daed296b2" Dec 05 12:59:34 crc kubenswrapper[4784]: I1205 12:59:34.854025 4784 scope.go:117] "RemoveContainer" containerID="e6ea5718a6972ab68d6effffe4222880a4d5b305c140f27ec9095eef447ae4a6" Dec 05 12:59:34 crc kubenswrapper[4784]: E1205 12:59:34.855673 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e6ea5718a6972ab68d6effffe4222880a4d5b305c140f27ec9095eef447ae4a6\": container with ID starting with e6ea5718a6972ab68d6effffe4222880a4d5b305c140f27ec9095eef447ae4a6 not found: ID does not exist" containerID="e6ea5718a6972ab68d6effffe4222880a4d5b305c140f27ec9095eef447ae4a6" Dec 05 12:59:34 crc kubenswrapper[4784]: I1205 12:59:34.855719 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6ea5718a6972ab68d6effffe4222880a4d5b305c140f27ec9095eef447ae4a6"} err="failed to get container status \"e6ea5718a6972ab68d6effffe4222880a4d5b305c140f27ec9095eef447ae4a6\": rpc error: code = NotFound desc = could not find container \"e6ea5718a6972ab68d6effffe4222880a4d5b305c140f27ec9095eef447ae4a6\": container with ID starting with e6ea5718a6972ab68d6effffe4222880a4d5b305c140f27ec9095eef447ae4a6 not found: ID does not exist" Dec 05 12:59:34 crc kubenswrapper[4784]: I1205 12:59:34.855750 4784 scope.go:117] "RemoveContainer" containerID="bbf9a29b118d5b09973454e58d3f1869aa1b7d191c4206ee6f3956d573001c7a" Dec 05 12:59:34 crc kubenswrapper[4784]: E1205 12:59:34.856146 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bbf9a29b118d5b09973454e58d3f1869aa1b7d191c4206ee6f3956d573001c7a\": container with ID starting with bbf9a29b118d5b09973454e58d3f1869aa1b7d191c4206ee6f3956d573001c7a not found: ID does not exist" containerID="bbf9a29b118d5b09973454e58d3f1869aa1b7d191c4206ee6f3956d573001c7a" Dec 05 12:59:34 crc kubenswrapper[4784]: I1205 12:59:34.856179 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bbf9a29b118d5b09973454e58d3f1869aa1b7d191c4206ee6f3956d573001c7a"} err="failed to get container status \"bbf9a29b118d5b09973454e58d3f1869aa1b7d191c4206ee6f3956d573001c7a\": rpc error: code = NotFound desc = could not find container \"bbf9a29b118d5b09973454e58d3f1869aa1b7d191c4206ee6f3956d573001c7a\": container with ID starting with bbf9a29b118d5b09973454e58d3f1869aa1b7d191c4206ee6f3956d573001c7a not found: ID does not exist" Dec 05 12:59:34 crc kubenswrapper[4784]: I1205 12:59:34.856215 4784 scope.go:117] "RemoveContainer" 
containerID="6f60bb77fa73170ea084049af2c27cc2cf0cace77fb57a956c78950daed296b2" Dec 05 12:59:34 crc kubenswrapper[4784]: E1205 12:59:34.856517 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6f60bb77fa73170ea084049af2c27cc2cf0cace77fb57a956c78950daed296b2\": container with ID starting with 6f60bb77fa73170ea084049af2c27cc2cf0cace77fb57a956c78950daed296b2 not found: ID does not exist" containerID="6f60bb77fa73170ea084049af2c27cc2cf0cace77fb57a956c78950daed296b2" Dec 05 12:59:34 crc kubenswrapper[4784]: I1205 12:59:34.856545 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f60bb77fa73170ea084049af2c27cc2cf0cace77fb57a956c78950daed296b2"} err="failed to get container status \"6f60bb77fa73170ea084049af2c27cc2cf0cace77fb57a956c78950daed296b2\": rpc error: code = NotFound desc = could not find container \"6f60bb77fa73170ea084049af2c27cc2cf0cace77fb57a956c78950daed296b2\": container with ID starting with 6f60bb77fa73170ea084049af2c27cc2cf0cace77fb57a956c78950daed296b2 not found: ID does not exist" Dec 05 12:59:35 crc kubenswrapper[4784]: I1205 12:59:35.011371 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d50f6999-3630-4528-98bb-5cad3a96cec5" path="/var/lib/kubelet/pods/d50f6999-3630-4528-98bb-5cad3a96cec5/volumes" Dec 05 12:59:38 crc kubenswrapper[4784]: I1205 12:59:38.782658 4784 scope.go:117] "RemoveContainer" containerID="4223db0146f6cfea315d03b2cbe6017284202579bbed8698ffb35e3e5fb1566c" Dec 05 13:00:00 crc kubenswrapper[4784]: I1205 13:00:00.181353 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415660-jbvrh"] Dec 05 13:00:00 crc kubenswrapper[4784]: E1205 13:00:00.182754 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d50f6999-3630-4528-98bb-5cad3a96cec5" containerName="extract-content" Dec 05 13:00:00 crc kubenswrapper[4784]: I1205 13:00:00.182777 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="d50f6999-3630-4528-98bb-5cad3a96cec5" containerName="extract-content" Dec 05 13:00:00 crc kubenswrapper[4784]: E1205 13:00:00.182804 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d50f6999-3630-4528-98bb-5cad3a96cec5" containerName="registry-server" Dec 05 13:00:00 crc kubenswrapper[4784]: I1205 13:00:00.182812 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="d50f6999-3630-4528-98bb-5cad3a96cec5" containerName="registry-server" Dec 05 13:00:00 crc kubenswrapper[4784]: E1205 13:00:00.182830 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d50f6999-3630-4528-98bb-5cad3a96cec5" containerName="extract-utilities" Dec 05 13:00:00 crc kubenswrapper[4784]: I1205 13:00:00.182839 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="d50f6999-3630-4528-98bb-5cad3a96cec5" containerName="extract-utilities" Dec 05 13:00:00 crc kubenswrapper[4784]: I1205 13:00:00.183097 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="d50f6999-3630-4528-98bb-5cad3a96cec5" containerName="registry-server" Dec 05 13:00:00 crc kubenswrapper[4784]: I1205 13:00:00.184125 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415660-jbvrh" Dec 05 13:00:00 crc kubenswrapper[4784]: I1205 13:00:00.196701 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 13:00:00 crc kubenswrapper[4784]: I1205 13:00:00.197669 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 13:00:00 crc kubenswrapper[4784]: I1205 13:00:00.203389 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415660-jbvrh"] Dec 05 13:00:00 crc kubenswrapper[4784]: I1205 13:00:00.276896 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hdn4b\" (UniqueName: \"kubernetes.io/projected/65395f2b-fd20-43e9-860e-3a35033375bf-kube-api-access-hdn4b\") pod \"collect-profiles-29415660-jbvrh\" (UID: \"65395f2b-fd20-43e9-860e-3a35033375bf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415660-jbvrh" Dec 05 13:00:00 crc kubenswrapper[4784]: I1205 13:00:00.276960 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/65395f2b-fd20-43e9-860e-3a35033375bf-secret-volume\") pod \"collect-profiles-29415660-jbvrh\" (UID: \"65395f2b-fd20-43e9-860e-3a35033375bf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415660-jbvrh" Dec 05 13:00:00 crc kubenswrapper[4784]: I1205 13:00:00.277004 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/65395f2b-fd20-43e9-860e-3a35033375bf-config-volume\") pod \"collect-profiles-29415660-jbvrh\" (UID: \"65395f2b-fd20-43e9-860e-3a35033375bf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415660-jbvrh" Dec 05 13:00:00 crc kubenswrapper[4784]: I1205 13:00:00.379387 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hdn4b\" (UniqueName: \"kubernetes.io/projected/65395f2b-fd20-43e9-860e-3a35033375bf-kube-api-access-hdn4b\") pod \"collect-profiles-29415660-jbvrh\" (UID: \"65395f2b-fd20-43e9-860e-3a35033375bf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415660-jbvrh" Dec 05 13:00:00 crc kubenswrapper[4784]: I1205 13:00:00.379460 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/65395f2b-fd20-43e9-860e-3a35033375bf-secret-volume\") pod \"collect-profiles-29415660-jbvrh\" (UID: \"65395f2b-fd20-43e9-860e-3a35033375bf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415660-jbvrh" Dec 05 13:00:00 crc kubenswrapper[4784]: I1205 13:00:00.379492 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/65395f2b-fd20-43e9-860e-3a35033375bf-config-volume\") pod \"collect-profiles-29415660-jbvrh\" (UID: \"65395f2b-fd20-43e9-860e-3a35033375bf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415660-jbvrh" Dec 05 13:00:00 crc kubenswrapper[4784]: I1205 13:00:00.380625 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/65395f2b-fd20-43e9-860e-3a35033375bf-config-volume\") pod 
\"collect-profiles-29415660-jbvrh\" (UID: \"65395f2b-fd20-43e9-860e-3a35033375bf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415660-jbvrh" Dec 05 13:00:00 crc kubenswrapper[4784]: I1205 13:00:00.387265 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/65395f2b-fd20-43e9-860e-3a35033375bf-secret-volume\") pod \"collect-profiles-29415660-jbvrh\" (UID: \"65395f2b-fd20-43e9-860e-3a35033375bf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415660-jbvrh" Dec 05 13:00:00 crc kubenswrapper[4784]: I1205 13:00:00.397654 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hdn4b\" (UniqueName: \"kubernetes.io/projected/65395f2b-fd20-43e9-860e-3a35033375bf-kube-api-access-hdn4b\") pod \"collect-profiles-29415660-jbvrh\" (UID: \"65395f2b-fd20-43e9-860e-3a35033375bf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415660-jbvrh" Dec 05 13:00:00 crc kubenswrapper[4784]: I1205 13:00:00.525897 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415660-jbvrh" Dec 05 13:00:00 crc kubenswrapper[4784]: I1205 13:00:00.928715 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415660-jbvrh"] Dec 05 13:00:00 crc kubenswrapper[4784]: I1205 13:00:00.977290 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415660-jbvrh" event={"ID":"65395f2b-fd20-43e9-860e-3a35033375bf","Type":"ContainerStarted","Data":"9b56c2c1cb6b1e0add4488052df9187ab2e1a533cec69d18091d97eca4d805f5"} Dec 05 13:00:01 crc kubenswrapper[4784]: I1205 13:00:01.990159 4784 generic.go:334] "Generic (PLEG): container finished" podID="65395f2b-fd20-43e9-860e-3a35033375bf" containerID="f38242b277a2a8c10172715feb9edc687484e10ab61959c758fa871365ec3da6" exitCode=0 Dec 05 13:00:01 crc kubenswrapper[4784]: I1205 13:00:01.990262 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415660-jbvrh" event={"ID":"65395f2b-fd20-43e9-860e-3a35033375bf","Type":"ContainerDied","Data":"f38242b277a2a8c10172715feb9edc687484e10ab61959c758fa871365ec3da6"} Dec 05 13:00:03 crc kubenswrapper[4784]: I1205 13:00:03.370331 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415660-jbvrh" Dec 05 13:00:03 crc kubenswrapper[4784]: I1205 13:00:03.486597 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/65395f2b-fd20-43e9-860e-3a35033375bf-config-volume\") pod \"65395f2b-fd20-43e9-860e-3a35033375bf\" (UID: \"65395f2b-fd20-43e9-860e-3a35033375bf\") " Dec 05 13:00:03 crc kubenswrapper[4784]: I1205 13:00:03.486683 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hdn4b\" (UniqueName: \"kubernetes.io/projected/65395f2b-fd20-43e9-860e-3a35033375bf-kube-api-access-hdn4b\") pod \"65395f2b-fd20-43e9-860e-3a35033375bf\" (UID: \"65395f2b-fd20-43e9-860e-3a35033375bf\") " Dec 05 13:00:03 crc kubenswrapper[4784]: I1205 13:00:03.486836 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/65395f2b-fd20-43e9-860e-3a35033375bf-secret-volume\") pod \"65395f2b-fd20-43e9-860e-3a35033375bf\" (UID: \"65395f2b-fd20-43e9-860e-3a35033375bf\") " Dec 05 13:00:03 crc kubenswrapper[4784]: I1205 13:00:03.487562 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65395f2b-fd20-43e9-860e-3a35033375bf-config-volume" (OuterVolumeSpecName: "config-volume") pod "65395f2b-fd20-43e9-860e-3a35033375bf" (UID: "65395f2b-fd20-43e9-860e-3a35033375bf"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 13:00:03 crc kubenswrapper[4784]: I1205 13:00:03.493255 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65395f2b-fd20-43e9-860e-3a35033375bf-kube-api-access-hdn4b" (OuterVolumeSpecName: "kube-api-access-hdn4b") pod "65395f2b-fd20-43e9-860e-3a35033375bf" (UID: "65395f2b-fd20-43e9-860e-3a35033375bf"). InnerVolumeSpecName "kube-api-access-hdn4b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:00:03 crc kubenswrapper[4784]: I1205 13:00:03.498314 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65395f2b-fd20-43e9-860e-3a35033375bf-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "65395f2b-fd20-43e9-860e-3a35033375bf" (UID: "65395f2b-fd20-43e9-860e-3a35033375bf"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:00:03 crc kubenswrapper[4784]: I1205 13:00:03.589008 4784 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/65395f2b-fd20-43e9-860e-3a35033375bf-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 13:00:03 crc kubenswrapper[4784]: I1205 13:00:03.589050 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hdn4b\" (UniqueName: \"kubernetes.io/projected/65395f2b-fd20-43e9-860e-3a35033375bf-kube-api-access-hdn4b\") on node \"crc\" DevicePath \"\"" Dec 05 13:00:03 crc kubenswrapper[4784]: I1205 13:00:03.589067 4784 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/65395f2b-fd20-43e9-860e-3a35033375bf-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 13:00:04 crc kubenswrapper[4784]: I1205 13:00:04.013945 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415660-jbvrh" event={"ID":"65395f2b-fd20-43e9-860e-3a35033375bf","Type":"ContainerDied","Data":"9b56c2c1cb6b1e0add4488052df9187ab2e1a533cec69d18091d97eca4d805f5"} Dec 05 13:00:04 crc kubenswrapper[4784]: I1205 13:00:04.014243 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9b56c2c1cb6b1e0add4488052df9187ab2e1a533cec69d18091d97eca4d805f5" Dec 05 13:00:04 crc kubenswrapper[4784]: I1205 13:00:04.013983 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415660-jbvrh" Dec 05 13:00:04 crc kubenswrapper[4784]: I1205 13:00:04.466957 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415615-7t8s6"] Dec 05 13:00:04 crc kubenswrapper[4784]: I1205 13:00:04.481699 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415615-7t8s6"] Dec 05 13:00:05 crc kubenswrapper[4784]: I1205 13:00:05.235493 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0a51b7e-45fa-4c8b-9700-0872a5f49527" path="/var/lib/kubelet/pods/c0a51b7e-45fa-4c8b-9700-0872a5f49527/volumes" Dec 05 13:00:30 crc kubenswrapper[4784]: I1205 13:00:30.283902 4784 generic.go:334] "Generic (PLEG): container finished" podID="b0194359-b6ce-4590-b835-c81b0c992ca1" containerID="a66b35ec710a33bee6fa51f44fc7cda12e3fac0c4bb6d8d4b0935c8134a2ea2e" exitCode=0 Dec 05 13:00:30 crc kubenswrapper[4784]: I1205 13:00:30.284009 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-m5gm5" event={"ID":"b0194359-b6ce-4590-b835-c81b0c992ca1","Type":"ContainerDied","Data":"a66b35ec710a33bee6fa51f44fc7cda12e3fac0c4bb6d8d4b0935c8134a2ea2e"} Dec 05 13:00:31 crc kubenswrapper[4784]: I1205 13:00:31.777859 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-m5gm5" Dec 05 13:00:31 crc kubenswrapper[4784]: I1205 13:00:31.928107 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nbjm5\" (UniqueName: \"kubernetes.io/projected/b0194359-b6ce-4590-b835-c81b0c992ca1-kube-api-access-nbjm5\") pod \"b0194359-b6ce-4590-b835-c81b0c992ca1\" (UID: \"b0194359-b6ce-4590-b835-c81b0c992ca1\") " Dec 05 13:00:31 crc kubenswrapper[4784]: I1205 13:00:31.928207 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b0194359-b6ce-4590-b835-c81b0c992ca1-ssh-key\") pod \"b0194359-b6ce-4590-b835-c81b0c992ca1\" (UID: \"b0194359-b6ce-4590-b835-c81b0c992ca1\") " Dec 05 13:00:31 crc kubenswrapper[4784]: I1205 13:00:31.928250 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b0194359-b6ce-4590-b835-c81b0c992ca1-inventory\") pod \"b0194359-b6ce-4590-b835-c81b0c992ca1\" (UID: \"b0194359-b6ce-4590-b835-c81b0c992ca1\") " Dec 05 13:00:31 crc kubenswrapper[4784]: I1205 13:00:31.937496 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b0194359-b6ce-4590-b835-c81b0c992ca1-kube-api-access-nbjm5" (OuterVolumeSpecName: "kube-api-access-nbjm5") pod "b0194359-b6ce-4590-b835-c81b0c992ca1" (UID: "b0194359-b6ce-4590-b835-c81b0c992ca1"). InnerVolumeSpecName "kube-api-access-nbjm5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:00:31 crc kubenswrapper[4784]: I1205 13:00:31.963943 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0194359-b6ce-4590-b835-c81b0c992ca1-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b0194359-b6ce-4590-b835-c81b0c992ca1" (UID: "b0194359-b6ce-4590-b835-c81b0c992ca1"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:00:31 crc kubenswrapper[4784]: I1205 13:00:31.982026 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0194359-b6ce-4590-b835-c81b0c992ca1-inventory" (OuterVolumeSpecName: "inventory") pod "b0194359-b6ce-4590-b835-c81b0c992ca1" (UID: "b0194359-b6ce-4590-b835-c81b0c992ca1"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:00:32 crc kubenswrapper[4784]: I1205 13:00:32.031356 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nbjm5\" (UniqueName: \"kubernetes.io/projected/b0194359-b6ce-4590-b835-c81b0c992ca1-kube-api-access-nbjm5\") on node \"crc\" DevicePath \"\"" Dec 05 13:00:32 crc kubenswrapper[4784]: I1205 13:00:32.031384 4784 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b0194359-b6ce-4590-b835-c81b0c992ca1-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 13:00:32 crc kubenswrapper[4784]: I1205 13:00:32.031395 4784 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b0194359-b6ce-4590-b835-c81b0c992ca1-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 13:00:32 crc kubenswrapper[4784]: I1205 13:00:32.305446 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-m5gm5" event={"ID":"b0194359-b6ce-4590-b835-c81b0c992ca1","Type":"ContainerDied","Data":"269692f7ecc152c873e2e15240c5749df0b43238208a37bc8044bf8b3ea48163"} Dec 05 13:00:32 crc kubenswrapper[4784]: I1205 13:00:32.305802 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="269692f7ecc152c873e2e15240c5749df0b43238208a37bc8044bf8b3ea48163" Dec 05 13:00:32 crc kubenswrapper[4784]: I1205 13:00:32.305528 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-m5gm5" Dec 05 13:00:32 crc kubenswrapper[4784]: I1205 13:00:32.398043 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-wzblc"] Dec 05 13:00:32 crc kubenswrapper[4784]: E1205 13:00:32.398431 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0194359-b6ce-4590-b835-c81b0c992ca1" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 05 13:00:32 crc kubenswrapper[4784]: I1205 13:00:32.398448 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0194359-b6ce-4590-b835-c81b0c992ca1" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 05 13:00:32 crc kubenswrapper[4784]: E1205 13:00:32.398470 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65395f2b-fd20-43e9-860e-3a35033375bf" containerName="collect-profiles" Dec 05 13:00:32 crc kubenswrapper[4784]: I1205 13:00:32.398476 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="65395f2b-fd20-43e9-860e-3a35033375bf" containerName="collect-profiles" Dec 05 13:00:32 crc kubenswrapper[4784]: I1205 13:00:32.398649 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="65395f2b-fd20-43e9-860e-3a35033375bf" containerName="collect-profiles" Dec 05 13:00:32 crc kubenswrapper[4784]: I1205 13:00:32.398665 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0194359-b6ce-4590-b835-c81b0c992ca1" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 05 13:00:32 crc kubenswrapper[4784]: I1205 13:00:32.399326 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-wzblc" Dec 05 13:00:32 crc kubenswrapper[4784]: I1205 13:00:32.404030 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 13:00:32 crc kubenswrapper[4784]: I1205 13:00:32.404368 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 13:00:32 crc kubenswrapper[4784]: I1205 13:00:32.404522 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-lwfzg" Dec 05 13:00:32 crc kubenswrapper[4784]: I1205 13:00:32.405204 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 13:00:32 crc kubenswrapper[4784]: I1205 13:00:32.420302 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-wzblc"] Dec 05 13:00:32 crc kubenswrapper[4784]: I1205 13:00:32.454056 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n5cv9\" (UniqueName: \"kubernetes.io/projected/304b4ee8-7619-47a1-970d-5fbeb6c24e96-kube-api-access-n5cv9\") pod \"ssh-known-hosts-edpm-deployment-wzblc\" (UID: \"304b4ee8-7619-47a1-970d-5fbeb6c24e96\") " pod="openstack/ssh-known-hosts-edpm-deployment-wzblc" Dec 05 13:00:32 crc kubenswrapper[4784]: I1205 13:00:32.454105 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/304b4ee8-7619-47a1-970d-5fbeb6c24e96-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-wzblc\" (UID: \"304b4ee8-7619-47a1-970d-5fbeb6c24e96\") " pod="openstack/ssh-known-hosts-edpm-deployment-wzblc" Dec 05 13:00:32 crc kubenswrapper[4784]: I1205 13:00:32.454219 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/304b4ee8-7619-47a1-970d-5fbeb6c24e96-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-wzblc\" (UID: \"304b4ee8-7619-47a1-970d-5fbeb6c24e96\") " pod="openstack/ssh-known-hosts-edpm-deployment-wzblc" Dec 05 13:00:32 crc kubenswrapper[4784]: I1205 13:00:32.554950 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/304b4ee8-7619-47a1-970d-5fbeb6c24e96-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-wzblc\" (UID: \"304b4ee8-7619-47a1-970d-5fbeb6c24e96\") " pod="openstack/ssh-known-hosts-edpm-deployment-wzblc" Dec 05 13:00:32 crc kubenswrapper[4784]: I1205 13:00:32.555060 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n5cv9\" (UniqueName: \"kubernetes.io/projected/304b4ee8-7619-47a1-970d-5fbeb6c24e96-kube-api-access-n5cv9\") pod \"ssh-known-hosts-edpm-deployment-wzblc\" (UID: \"304b4ee8-7619-47a1-970d-5fbeb6c24e96\") " pod="openstack/ssh-known-hosts-edpm-deployment-wzblc" Dec 05 13:00:32 crc kubenswrapper[4784]: I1205 13:00:32.555086 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/304b4ee8-7619-47a1-970d-5fbeb6c24e96-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-wzblc\" (UID: \"304b4ee8-7619-47a1-970d-5fbeb6c24e96\") " pod="openstack/ssh-known-hosts-edpm-deployment-wzblc" Dec 05 13:00:32 crc 
Dec 05 13:00:32 crc kubenswrapper[4784]: I1205 13:00:32.564147 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/304b4ee8-7619-47a1-970d-5fbeb6c24e96-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-wzblc\" (UID: \"304b4ee8-7619-47a1-970d-5fbeb6c24e96\") " pod="openstack/ssh-known-hosts-edpm-deployment-wzblc"
Dec 05 13:00:32 crc kubenswrapper[4784]: I1205 13:00:32.564624 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/304b4ee8-7619-47a1-970d-5fbeb6c24e96-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-wzblc\" (UID: \"304b4ee8-7619-47a1-970d-5fbeb6c24e96\") " pod="openstack/ssh-known-hosts-edpm-deployment-wzblc"
Dec 05 13:00:32 crc kubenswrapper[4784]: I1205 13:00:32.571098 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n5cv9\" (UniqueName: \"kubernetes.io/projected/304b4ee8-7619-47a1-970d-5fbeb6c24e96-kube-api-access-n5cv9\") pod \"ssh-known-hosts-edpm-deployment-wzblc\" (UID: \"304b4ee8-7619-47a1-970d-5fbeb6c24e96\") " pod="openstack/ssh-known-hosts-edpm-deployment-wzblc"
Dec 05 13:00:32 crc kubenswrapper[4784]: I1205 13:00:32.757585 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-wzblc"
Dec 05 13:00:33 crc kubenswrapper[4784]: I1205 13:00:33.320545 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-wzblc"]
Dec 05 13:00:33 crc kubenswrapper[4784]: I1205 13:00:33.321078 4784 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 05 13:00:34 crc kubenswrapper[4784]: I1205 13:00:34.335157 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-wzblc" event={"ID":"304b4ee8-7619-47a1-970d-5fbeb6c24e96","Type":"ContainerStarted","Data":"6f9462dcb62517d7e3e07b6e7790a4d88348e9f5c60963f18f5fb027f6137c0c"}
Dec 05 13:00:34 crc kubenswrapper[4784]: I1205 13:00:34.335865 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-wzblc" event={"ID":"304b4ee8-7619-47a1-970d-5fbeb6c24e96","Type":"ContainerStarted","Data":"30d3e170c031fe237743bf1672f2b08d67ceb529828f864a79f948fabf017d33"}
Dec 05 13:00:34 crc kubenswrapper[4784]: I1205 13:00:34.356214 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-wzblc" podStartSLOduration=1.877912569 podStartE2EDuration="2.356184262s" podCreationTimestamp="2025-12-05 13:00:32 +0000 UTC" firstStartedPulling="2025-12-05 13:00:33.320841657 +0000 UTC m=+2112.740908472" lastFinishedPulling="2025-12-05 13:00:33.79911334 +0000 UTC m=+2113.219180165" observedRunningTime="2025-12-05 13:00:34.355259063 +0000 UTC m=+2113.775325878" watchObservedRunningTime="2025-12-05 13:00:34.356184262 +0000 UTC m=+2113.776251077"
Dec 05 13:00:38 crc kubenswrapper[4784]: I1205 13:00:38.897557 4784 scope.go:117] "RemoveContainer" containerID="0ad12bf41a83ee38647a6d11038389b4ae671f3b9884634912ee190c9ef72755"
Dec 05 13:00:41 crc kubenswrapper[4784]: I1205 13:00:41.405861 4784 generic.go:334] "Generic (PLEG): container finished" podID="304b4ee8-7619-47a1-970d-5fbeb6c24e96" containerID="6f9462dcb62517d7e3e07b6e7790a4d88348e9f5c60963f18f5fb027f6137c0c" exitCode=0
Dec 05 13:00:41 crc kubenswrapper[4784]: I1205 13:00:41.405954 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-wzblc" event={"ID":"304b4ee8-7619-47a1-970d-5fbeb6c24e96","Type":"ContainerDied","Data":"6f9462dcb62517d7e3e07b6e7790a4d88348e9f5c60963f18f5fb027f6137c0c"}
Dec 05 13:00:42 crc kubenswrapper[4784]: I1205 13:00:42.940261 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-wzblc"
Dec 05 13:00:42 crc kubenswrapper[4784]: I1205 13:00:42.982246 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/304b4ee8-7619-47a1-970d-5fbeb6c24e96-ssh-key-openstack-edpm-ipam\") pod \"304b4ee8-7619-47a1-970d-5fbeb6c24e96\" (UID: \"304b4ee8-7619-47a1-970d-5fbeb6c24e96\") "
Dec 05 13:00:42 crc kubenswrapper[4784]: I1205 13:00:42.982312 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n5cv9\" (UniqueName: \"kubernetes.io/projected/304b4ee8-7619-47a1-970d-5fbeb6c24e96-kube-api-access-n5cv9\") pod \"304b4ee8-7619-47a1-970d-5fbeb6c24e96\" (UID: \"304b4ee8-7619-47a1-970d-5fbeb6c24e96\") "
Dec 05 13:00:42 crc kubenswrapper[4784]: I1205 13:00:42.982365 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/304b4ee8-7619-47a1-970d-5fbeb6c24e96-inventory-0\") pod \"304b4ee8-7619-47a1-970d-5fbeb6c24e96\" (UID: \"304b4ee8-7619-47a1-970d-5fbeb6c24e96\") "
Dec 05 13:00:42 crc kubenswrapper[4784]: I1205 13:00:42.987948 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/304b4ee8-7619-47a1-970d-5fbeb6c24e96-kube-api-access-n5cv9" (OuterVolumeSpecName: "kube-api-access-n5cv9") pod "304b4ee8-7619-47a1-970d-5fbeb6c24e96" (UID: "304b4ee8-7619-47a1-970d-5fbeb6c24e96"). InnerVolumeSpecName "kube-api-access-n5cv9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 13:00:43 crc kubenswrapper[4784]: I1205 13:00:43.030617 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/304b4ee8-7619-47a1-970d-5fbeb6c24e96-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "304b4ee8-7619-47a1-970d-5fbeb6c24e96" (UID: "304b4ee8-7619-47a1-970d-5fbeb6c24e96"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:00:43 crc kubenswrapper[4784]: I1205 13:00:43.085047 4784 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/304b4ee8-7619-47a1-970d-5fbeb6c24e96-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 05 13:00:43 crc kubenswrapper[4784]: I1205 13:00:43.085081 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n5cv9\" (UniqueName: \"kubernetes.io/projected/304b4ee8-7619-47a1-970d-5fbeb6c24e96-kube-api-access-n5cv9\") on node \"crc\" DevicePath \"\"" Dec 05 13:00:43 crc kubenswrapper[4784]: I1205 13:00:43.085093 4784 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/304b4ee8-7619-47a1-970d-5fbeb6c24e96-inventory-0\") on node \"crc\" DevicePath \"\"" Dec 05 13:00:43 crc kubenswrapper[4784]: I1205 13:00:43.423771 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-wzblc" event={"ID":"304b4ee8-7619-47a1-970d-5fbeb6c24e96","Type":"ContainerDied","Data":"30d3e170c031fe237743bf1672f2b08d67ceb529828f864a79f948fabf017d33"} Dec 05 13:00:43 crc kubenswrapper[4784]: I1205 13:00:43.423812 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="30d3e170c031fe237743bf1672f2b08d67ceb529828f864a79f948fabf017d33" Dec 05 13:00:43 crc kubenswrapper[4784]: I1205 13:00:43.423831 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-wzblc" Dec 05 13:00:43 crc kubenswrapper[4784]: I1205 13:00:43.525013 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-rk64x"] Dec 05 13:00:43 crc kubenswrapper[4784]: E1205 13:00:43.525694 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="304b4ee8-7619-47a1-970d-5fbeb6c24e96" containerName="ssh-known-hosts-edpm-deployment" Dec 05 13:00:43 crc kubenswrapper[4784]: I1205 13:00:43.525715 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="304b4ee8-7619-47a1-970d-5fbeb6c24e96" containerName="ssh-known-hosts-edpm-deployment" Dec 05 13:00:43 crc kubenswrapper[4784]: I1205 13:00:43.525917 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="304b4ee8-7619-47a1-970d-5fbeb6c24e96" containerName="ssh-known-hosts-edpm-deployment" Dec 05 13:00:43 crc kubenswrapper[4784]: I1205 13:00:43.526555 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-rk64x" Dec 05 13:00:43 crc kubenswrapper[4784]: I1205 13:00:43.531524 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 13:00:43 crc kubenswrapper[4784]: I1205 13:00:43.531603 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 13:00:43 crc kubenswrapper[4784]: I1205 13:00:43.531875 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 13:00:43 crc kubenswrapper[4784]: I1205 13:00:43.531979 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-lwfzg" Dec 05 13:00:43 crc kubenswrapper[4784]: I1205 13:00:43.548099 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-rk64x"] Dec 05 13:00:43 crc kubenswrapper[4784]: I1205 13:00:43.594521 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/238cfc14-62ab-498c-acc8-ec79cea43fa8-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-rk64x\" (UID: \"238cfc14-62ab-498c-acc8-ec79cea43fa8\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-rk64x" Dec 05 13:00:43 crc kubenswrapper[4784]: I1205 13:00:43.594576 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lq455\" (UniqueName: \"kubernetes.io/projected/238cfc14-62ab-498c-acc8-ec79cea43fa8-kube-api-access-lq455\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-rk64x\" (UID: \"238cfc14-62ab-498c-acc8-ec79cea43fa8\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-rk64x" Dec 05 13:00:43 crc kubenswrapper[4784]: I1205 13:00:43.594665 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/238cfc14-62ab-498c-acc8-ec79cea43fa8-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-rk64x\" (UID: \"238cfc14-62ab-498c-acc8-ec79cea43fa8\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-rk64x" Dec 05 13:00:43 crc kubenswrapper[4784]: I1205 13:00:43.696383 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/238cfc14-62ab-498c-acc8-ec79cea43fa8-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-rk64x\" (UID: \"238cfc14-62ab-498c-acc8-ec79cea43fa8\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-rk64x" Dec 05 13:00:43 crc kubenswrapper[4784]: I1205 13:00:43.696455 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lq455\" (UniqueName: \"kubernetes.io/projected/238cfc14-62ab-498c-acc8-ec79cea43fa8-kube-api-access-lq455\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-rk64x\" (UID: \"238cfc14-62ab-498c-acc8-ec79cea43fa8\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-rk64x" Dec 05 13:00:43 crc kubenswrapper[4784]: I1205 13:00:43.696544 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/238cfc14-62ab-498c-acc8-ec79cea43fa8-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-rk64x\" (UID: \"238cfc14-62ab-498c-acc8-ec79cea43fa8\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-rk64x" Dec 05 13:00:43 crc kubenswrapper[4784]: I1205 13:00:43.701659 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/238cfc14-62ab-498c-acc8-ec79cea43fa8-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-rk64x\" (UID: \"238cfc14-62ab-498c-acc8-ec79cea43fa8\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-rk64x" Dec 05 13:00:43 crc kubenswrapper[4784]: I1205 13:00:43.701918 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/238cfc14-62ab-498c-acc8-ec79cea43fa8-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-rk64x\" (UID: \"238cfc14-62ab-498c-acc8-ec79cea43fa8\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-rk64x" Dec 05 13:00:43 crc kubenswrapper[4784]: I1205 13:00:43.714566 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lq455\" (UniqueName: \"kubernetes.io/projected/238cfc14-62ab-498c-acc8-ec79cea43fa8-kube-api-access-lq455\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-rk64x\" (UID: \"238cfc14-62ab-498c-acc8-ec79cea43fa8\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-rk64x" Dec 05 13:00:43 crc kubenswrapper[4784]: I1205 13:00:43.841709 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-rk64x" Dec 05 13:00:44 crc kubenswrapper[4784]: I1205 13:00:44.454525 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-rk64x"] Dec 05 13:00:45 crc kubenswrapper[4784]: I1205 13:00:45.443525 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-rk64x" event={"ID":"238cfc14-62ab-498c-acc8-ec79cea43fa8","Type":"ContainerStarted","Data":"007b04b48c87cd5176b7115fa28ec864c4e496895eb57e13f4baa411789bc57c"} Dec 05 13:00:45 crc kubenswrapper[4784]: I1205 13:00:45.443875 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-rk64x" event={"ID":"238cfc14-62ab-498c-acc8-ec79cea43fa8","Type":"ContainerStarted","Data":"8688991ba6f83027060b1062486337d076fb99b8f1f23fd2f1967c137b4e9fbd"} Dec 05 13:00:45 crc kubenswrapper[4784]: I1205 13:00:45.467402 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-rk64x" podStartSLOduration=2.046933559 podStartE2EDuration="2.467374821s" podCreationTimestamp="2025-12-05 13:00:43 +0000 UTC" firstStartedPulling="2025-12-05 13:00:44.465654399 +0000 UTC m=+2123.885721214" lastFinishedPulling="2025-12-05 13:00:44.886095641 +0000 UTC m=+2124.306162476" observedRunningTime="2025-12-05 13:00:45.459400861 +0000 UTC m=+2124.879467676" watchObservedRunningTime="2025-12-05 13:00:45.467374821 +0000 UTC m=+2124.887441646" Dec 05 13:00:53 crc kubenswrapper[4784]: I1205 13:00:53.515069 4784 generic.go:334] "Generic (PLEG): container finished" podID="238cfc14-62ab-498c-acc8-ec79cea43fa8" containerID="007b04b48c87cd5176b7115fa28ec864c4e496895eb57e13f4baa411789bc57c" exitCode=0 Dec 05 13:00:53 crc kubenswrapper[4784]: I1205 13:00:53.515641 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-rk64x" 
event={"ID":"238cfc14-62ab-498c-acc8-ec79cea43fa8","Type":"ContainerDied","Data":"007b04b48c87cd5176b7115fa28ec864c4e496895eb57e13f4baa411789bc57c"} Dec 05 13:00:55 crc kubenswrapper[4784]: I1205 13:00:55.000794 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-rk64x" Dec 05 13:00:55 crc kubenswrapper[4784]: I1205 13:00:55.118617 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/238cfc14-62ab-498c-acc8-ec79cea43fa8-inventory\") pod \"238cfc14-62ab-498c-acc8-ec79cea43fa8\" (UID: \"238cfc14-62ab-498c-acc8-ec79cea43fa8\") " Dec 05 13:00:55 crc kubenswrapper[4784]: I1205 13:00:55.118715 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/238cfc14-62ab-498c-acc8-ec79cea43fa8-ssh-key\") pod \"238cfc14-62ab-498c-acc8-ec79cea43fa8\" (UID: \"238cfc14-62ab-498c-acc8-ec79cea43fa8\") " Dec 05 13:00:55 crc kubenswrapper[4784]: I1205 13:00:55.118952 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lq455\" (UniqueName: \"kubernetes.io/projected/238cfc14-62ab-498c-acc8-ec79cea43fa8-kube-api-access-lq455\") pod \"238cfc14-62ab-498c-acc8-ec79cea43fa8\" (UID: \"238cfc14-62ab-498c-acc8-ec79cea43fa8\") " Dec 05 13:00:55 crc kubenswrapper[4784]: I1205 13:00:55.128441 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/238cfc14-62ab-498c-acc8-ec79cea43fa8-kube-api-access-lq455" (OuterVolumeSpecName: "kube-api-access-lq455") pod "238cfc14-62ab-498c-acc8-ec79cea43fa8" (UID: "238cfc14-62ab-498c-acc8-ec79cea43fa8"). InnerVolumeSpecName "kube-api-access-lq455". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:00:55 crc kubenswrapper[4784]: I1205 13:00:55.161362 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/238cfc14-62ab-498c-acc8-ec79cea43fa8-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "238cfc14-62ab-498c-acc8-ec79cea43fa8" (UID: "238cfc14-62ab-498c-acc8-ec79cea43fa8"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:00:55 crc kubenswrapper[4784]: I1205 13:00:55.178845 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/238cfc14-62ab-498c-acc8-ec79cea43fa8-inventory" (OuterVolumeSpecName: "inventory") pod "238cfc14-62ab-498c-acc8-ec79cea43fa8" (UID: "238cfc14-62ab-498c-acc8-ec79cea43fa8"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:00:55 crc kubenswrapper[4784]: I1205 13:00:55.221604 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lq455\" (UniqueName: \"kubernetes.io/projected/238cfc14-62ab-498c-acc8-ec79cea43fa8-kube-api-access-lq455\") on node \"crc\" DevicePath \"\"" Dec 05 13:00:55 crc kubenswrapper[4784]: I1205 13:00:55.221650 4784 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/238cfc14-62ab-498c-acc8-ec79cea43fa8-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 13:00:55 crc kubenswrapper[4784]: I1205 13:00:55.221660 4784 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/238cfc14-62ab-498c-acc8-ec79cea43fa8-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 13:00:55 crc kubenswrapper[4784]: I1205 13:00:55.535134 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-rk64x" event={"ID":"238cfc14-62ab-498c-acc8-ec79cea43fa8","Type":"ContainerDied","Data":"8688991ba6f83027060b1062486337d076fb99b8f1f23fd2f1967c137b4e9fbd"} Dec 05 13:00:55 crc kubenswrapper[4784]: I1205 13:00:55.535176 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8688991ba6f83027060b1062486337d076fb99b8f1f23fd2f1967c137b4e9fbd" Dec 05 13:00:55 crc kubenswrapper[4784]: I1205 13:00:55.535200 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-rk64x" Dec 05 13:00:55 crc kubenswrapper[4784]: I1205 13:00:55.611869 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dbnnx"] Dec 05 13:00:55 crc kubenswrapper[4784]: E1205 13:00:55.613161 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="238cfc14-62ab-498c-acc8-ec79cea43fa8" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 05 13:00:55 crc kubenswrapper[4784]: I1205 13:00:55.613182 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="238cfc14-62ab-498c-acc8-ec79cea43fa8" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 05 13:00:55 crc kubenswrapper[4784]: I1205 13:00:55.613413 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="238cfc14-62ab-498c-acc8-ec79cea43fa8" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 05 13:00:55 crc kubenswrapper[4784]: I1205 13:00:55.614129 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dbnnx" Dec 05 13:00:55 crc kubenswrapper[4784]: I1205 13:00:55.616324 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 13:00:55 crc kubenswrapper[4784]: I1205 13:00:55.616520 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 13:00:55 crc kubenswrapper[4784]: I1205 13:00:55.616559 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-lwfzg" Dec 05 13:00:55 crc kubenswrapper[4784]: I1205 13:00:55.616927 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 13:00:55 crc kubenswrapper[4784]: I1205 13:00:55.626374 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dbnnx"] Dec 05 13:00:55 crc kubenswrapper[4784]: I1205 13:00:55.731456 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5f5f986e-311c-41da-aae4-18d6f3520749-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-dbnnx\" (UID: \"5f5f986e-311c-41da-aae4-18d6f3520749\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dbnnx" Dec 05 13:00:55 crc kubenswrapper[4784]: I1205 13:00:55.731530 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6xd2l\" (UniqueName: \"kubernetes.io/projected/5f5f986e-311c-41da-aae4-18d6f3520749-kube-api-access-6xd2l\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-dbnnx\" (UID: \"5f5f986e-311c-41da-aae4-18d6f3520749\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dbnnx" Dec 05 13:00:55 crc kubenswrapper[4784]: I1205 13:00:55.731587 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f5f986e-311c-41da-aae4-18d6f3520749-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-dbnnx\" (UID: \"5f5f986e-311c-41da-aae4-18d6f3520749\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dbnnx" Dec 05 13:00:55 crc kubenswrapper[4784]: I1205 13:00:55.833589 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5f5f986e-311c-41da-aae4-18d6f3520749-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-dbnnx\" (UID: \"5f5f986e-311c-41da-aae4-18d6f3520749\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dbnnx" Dec 05 13:00:55 crc kubenswrapper[4784]: I1205 13:00:55.833689 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6xd2l\" (UniqueName: \"kubernetes.io/projected/5f5f986e-311c-41da-aae4-18d6f3520749-kube-api-access-6xd2l\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-dbnnx\" (UID: \"5f5f986e-311c-41da-aae4-18d6f3520749\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dbnnx" Dec 05 13:00:55 crc kubenswrapper[4784]: I1205 13:00:55.833767 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f5f986e-311c-41da-aae4-18d6f3520749-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-dbnnx\" (UID: 
\"5f5f986e-311c-41da-aae4-18d6f3520749\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dbnnx" Dec 05 13:00:55 crc kubenswrapper[4784]: I1205 13:00:55.839401 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5f5f986e-311c-41da-aae4-18d6f3520749-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-dbnnx\" (UID: \"5f5f986e-311c-41da-aae4-18d6f3520749\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dbnnx" Dec 05 13:00:55 crc kubenswrapper[4784]: I1205 13:00:55.847832 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f5f986e-311c-41da-aae4-18d6f3520749-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-dbnnx\" (UID: \"5f5f986e-311c-41da-aae4-18d6f3520749\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dbnnx" Dec 05 13:00:55 crc kubenswrapper[4784]: I1205 13:00:55.855214 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6xd2l\" (UniqueName: \"kubernetes.io/projected/5f5f986e-311c-41da-aae4-18d6f3520749-kube-api-access-6xd2l\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-dbnnx\" (UID: \"5f5f986e-311c-41da-aae4-18d6f3520749\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dbnnx" Dec 05 13:00:55 crc kubenswrapper[4784]: I1205 13:00:55.936043 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dbnnx" Dec 05 13:00:56 crc kubenswrapper[4784]: I1205 13:00:56.470546 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dbnnx"] Dec 05 13:00:56 crc kubenswrapper[4784]: I1205 13:00:56.545540 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dbnnx" event={"ID":"5f5f986e-311c-41da-aae4-18d6f3520749","Type":"ContainerStarted","Data":"8e3ac455f3034f23f35bdde0842d7268792ae2825b9555cfdd5b37fdfe8f56a8"} Dec 05 13:00:57 crc kubenswrapper[4784]: I1205 13:00:57.555783 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dbnnx" event={"ID":"5f5f986e-311c-41da-aae4-18d6f3520749","Type":"ContainerStarted","Data":"39b432f386ae0f534623ec6ec0f3545428f1ce04973cf8f3a951e736d01d5086"} Dec 05 13:00:57 crc kubenswrapper[4784]: I1205 13:00:57.574989 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dbnnx" podStartSLOduration=2.134070369 podStartE2EDuration="2.574970661s" podCreationTimestamp="2025-12-05 13:00:55 +0000 UTC" firstStartedPulling="2025-12-05 13:00:56.473370685 +0000 UTC m=+2135.893437510" lastFinishedPulling="2025-12-05 13:00:56.914270987 +0000 UTC m=+2136.334337802" observedRunningTime="2025-12-05 13:00:57.573033611 +0000 UTC m=+2136.993100426" watchObservedRunningTime="2025-12-05 13:00:57.574970661 +0000 UTC m=+2136.995037476" Dec 05 13:01:00 crc kubenswrapper[4784]: I1205 13:01:00.157946 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29415661-ckq8x"] Dec 05 13:01:00 crc kubenswrapper[4784]: I1205 13:01:00.161345 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29415661-ckq8x" Dec 05 13:01:00 crc kubenswrapper[4784]: I1205 13:01:00.178238 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29415661-ckq8x"] Dec 05 13:01:00 crc kubenswrapper[4784]: I1205 13:01:00.235222 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f2hmg\" (UniqueName: \"kubernetes.io/projected/0090d182-b58b-4c0b-83b0-82ce94675e65-kube-api-access-f2hmg\") pod \"keystone-cron-29415661-ckq8x\" (UID: \"0090d182-b58b-4c0b-83b0-82ce94675e65\") " pod="openstack/keystone-cron-29415661-ckq8x" Dec 05 13:01:00 crc kubenswrapper[4784]: I1205 13:01:00.235317 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0090d182-b58b-4c0b-83b0-82ce94675e65-config-data\") pod \"keystone-cron-29415661-ckq8x\" (UID: \"0090d182-b58b-4c0b-83b0-82ce94675e65\") " pod="openstack/keystone-cron-29415661-ckq8x" Dec 05 13:01:00 crc kubenswrapper[4784]: I1205 13:01:00.235366 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0090d182-b58b-4c0b-83b0-82ce94675e65-fernet-keys\") pod \"keystone-cron-29415661-ckq8x\" (UID: \"0090d182-b58b-4c0b-83b0-82ce94675e65\") " pod="openstack/keystone-cron-29415661-ckq8x" Dec 05 13:01:00 crc kubenswrapper[4784]: I1205 13:01:00.235387 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0090d182-b58b-4c0b-83b0-82ce94675e65-combined-ca-bundle\") pod \"keystone-cron-29415661-ckq8x\" (UID: \"0090d182-b58b-4c0b-83b0-82ce94675e65\") " pod="openstack/keystone-cron-29415661-ckq8x" Dec 05 13:01:00 crc kubenswrapper[4784]: I1205 13:01:00.336678 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f2hmg\" (UniqueName: \"kubernetes.io/projected/0090d182-b58b-4c0b-83b0-82ce94675e65-kube-api-access-f2hmg\") pod \"keystone-cron-29415661-ckq8x\" (UID: \"0090d182-b58b-4c0b-83b0-82ce94675e65\") " pod="openstack/keystone-cron-29415661-ckq8x" Dec 05 13:01:00 crc kubenswrapper[4784]: I1205 13:01:00.336789 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0090d182-b58b-4c0b-83b0-82ce94675e65-config-data\") pod \"keystone-cron-29415661-ckq8x\" (UID: \"0090d182-b58b-4c0b-83b0-82ce94675e65\") " pod="openstack/keystone-cron-29415661-ckq8x" Dec 05 13:01:00 crc kubenswrapper[4784]: I1205 13:01:00.336840 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0090d182-b58b-4c0b-83b0-82ce94675e65-fernet-keys\") pod \"keystone-cron-29415661-ckq8x\" (UID: \"0090d182-b58b-4c0b-83b0-82ce94675e65\") " pod="openstack/keystone-cron-29415661-ckq8x" Dec 05 13:01:00 crc kubenswrapper[4784]: I1205 13:01:00.336861 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0090d182-b58b-4c0b-83b0-82ce94675e65-combined-ca-bundle\") pod \"keystone-cron-29415661-ckq8x\" (UID: \"0090d182-b58b-4c0b-83b0-82ce94675e65\") " pod="openstack/keystone-cron-29415661-ckq8x" Dec 05 13:01:00 crc kubenswrapper[4784]: I1205 13:01:00.344307 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0090d182-b58b-4c0b-83b0-82ce94675e65-combined-ca-bundle\") pod \"keystone-cron-29415661-ckq8x\" (UID: \"0090d182-b58b-4c0b-83b0-82ce94675e65\") " pod="openstack/keystone-cron-29415661-ckq8x" Dec 05 13:01:00 crc kubenswrapper[4784]: I1205 13:01:00.346653 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0090d182-b58b-4c0b-83b0-82ce94675e65-fernet-keys\") pod \"keystone-cron-29415661-ckq8x\" (UID: \"0090d182-b58b-4c0b-83b0-82ce94675e65\") " pod="openstack/keystone-cron-29415661-ckq8x" Dec 05 13:01:00 crc kubenswrapper[4784]: I1205 13:01:00.355162 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f2hmg\" (UniqueName: \"kubernetes.io/projected/0090d182-b58b-4c0b-83b0-82ce94675e65-kube-api-access-f2hmg\") pod \"keystone-cron-29415661-ckq8x\" (UID: \"0090d182-b58b-4c0b-83b0-82ce94675e65\") " pod="openstack/keystone-cron-29415661-ckq8x" Dec 05 13:01:00 crc kubenswrapper[4784]: I1205 13:01:00.361541 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0090d182-b58b-4c0b-83b0-82ce94675e65-config-data\") pod \"keystone-cron-29415661-ckq8x\" (UID: \"0090d182-b58b-4c0b-83b0-82ce94675e65\") " pod="openstack/keystone-cron-29415661-ckq8x" Dec 05 13:01:00 crc kubenswrapper[4784]: I1205 13:01:00.531056 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29415661-ckq8x" Dec 05 13:01:01 crc kubenswrapper[4784]: W1205 13:01:01.027076 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0090d182_b58b_4c0b_83b0_82ce94675e65.slice/crio-f96dfcce3cb5dbb460b2330de5a3cea0da806fd2abee0fb8f59444fa431e3024 WatchSource:0}: Error finding container f96dfcce3cb5dbb460b2330de5a3cea0da806fd2abee0fb8f59444fa431e3024: Status 404 returned error can't find the container with id f96dfcce3cb5dbb460b2330de5a3cea0da806fd2abee0fb8f59444fa431e3024 Dec 05 13:01:01 crc kubenswrapper[4784]: I1205 13:01:01.036104 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29415661-ckq8x"] Dec 05 13:01:01 crc kubenswrapper[4784]: I1205 13:01:01.602618 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415661-ckq8x" event={"ID":"0090d182-b58b-4c0b-83b0-82ce94675e65","Type":"ContainerStarted","Data":"88bbd8f91c202b2b4e44bbb0ebbcaf5274d40fd7894d6177f7cf26f2ba00d31f"} Dec 05 13:01:01 crc kubenswrapper[4784]: I1205 13:01:01.603054 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415661-ckq8x" event={"ID":"0090d182-b58b-4c0b-83b0-82ce94675e65","Type":"ContainerStarted","Data":"f96dfcce3cb5dbb460b2330de5a3cea0da806fd2abee0fb8f59444fa431e3024"} Dec 05 13:01:01 crc kubenswrapper[4784]: I1205 13:01:01.637667 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29415661-ckq8x" podStartSLOduration=1.637634976 podStartE2EDuration="1.637634976s" podCreationTimestamp="2025-12-05 13:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 13:01:01.633599389 +0000 UTC m=+2141.053666204" watchObservedRunningTime="2025-12-05 13:01:01.637634976 +0000 UTC m=+2141.057701821" Dec 05 13:01:04 crc kubenswrapper[4784]: I1205 13:01:04.630309 4784 
generic.go:334] "Generic (PLEG): container finished" podID="0090d182-b58b-4c0b-83b0-82ce94675e65" containerID="88bbd8f91c202b2b4e44bbb0ebbcaf5274d40fd7894d6177f7cf26f2ba00d31f" exitCode=0 Dec 05 13:01:04 crc kubenswrapper[4784]: I1205 13:01:04.630614 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415661-ckq8x" event={"ID":"0090d182-b58b-4c0b-83b0-82ce94675e65","Type":"ContainerDied","Data":"88bbd8f91c202b2b4e44bbb0ebbcaf5274d40fd7894d6177f7cf26f2ba00d31f"} Dec 05 13:01:06 crc kubenswrapper[4784]: I1205 13:01:06.039972 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29415661-ckq8x" Dec 05 13:01:06 crc kubenswrapper[4784]: I1205 13:01:06.158696 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0090d182-b58b-4c0b-83b0-82ce94675e65-config-data\") pod \"0090d182-b58b-4c0b-83b0-82ce94675e65\" (UID: \"0090d182-b58b-4c0b-83b0-82ce94675e65\") " Dec 05 13:01:06 crc kubenswrapper[4784]: I1205 13:01:06.158770 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f2hmg\" (UniqueName: \"kubernetes.io/projected/0090d182-b58b-4c0b-83b0-82ce94675e65-kube-api-access-f2hmg\") pod \"0090d182-b58b-4c0b-83b0-82ce94675e65\" (UID: \"0090d182-b58b-4c0b-83b0-82ce94675e65\") " Dec 05 13:01:06 crc kubenswrapper[4784]: I1205 13:01:06.158811 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0090d182-b58b-4c0b-83b0-82ce94675e65-combined-ca-bundle\") pod \"0090d182-b58b-4c0b-83b0-82ce94675e65\" (UID: \"0090d182-b58b-4c0b-83b0-82ce94675e65\") " Dec 05 13:01:06 crc kubenswrapper[4784]: I1205 13:01:06.159027 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0090d182-b58b-4c0b-83b0-82ce94675e65-fernet-keys\") pod \"0090d182-b58b-4c0b-83b0-82ce94675e65\" (UID: \"0090d182-b58b-4c0b-83b0-82ce94675e65\") " Dec 05 13:01:06 crc kubenswrapper[4784]: I1205 13:01:06.166333 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0090d182-b58b-4c0b-83b0-82ce94675e65-kube-api-access-f2hmg" (OuterVolumeSpecName: "kube-api-access-f2hmg") pod "0090d182-b58b-4c0b-83b0-82ce94675e65" (UID: "0090d182-b58b-4c0b-83b0-82ce94675e65"). InnerVolumeSpecName "kube-api-access-f2hmg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:01:06 crc kubenswrapper[4784]: I1205 13:01:06.166532 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0090d182-b58b-4c0b-83b0-82ce94675e65-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "0090d182-b58b-4c0b-83b0-82ce94675e65" (UID: "0090d182-b58b-4c0b-83b0-82ce94675e65"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:01:06 crc kubenswrapper[4784]: I1205 13:01:06.212539 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0090d182-b58b-4c0b-83b0-82ce94675e65-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0090d182-b58b-4c0b-83b0-82ce94675e65" (UID: "0090d182-b58b-4c0b-83b0-82ce94675e65"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:01:06 crc kubenswrapper[4784]: I1205 13:01:06.222449 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0090d182-b58b-4c0b-83b0-82ce94675e65-config-data" (OuterVolumeSpecName: "config-data") pod "0090d182-b58b-4c0b-83b0-82ce94675e65" (UID: "0090d182-b58b-4c0b-83b0-82ce94675e65"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:01:06 crc kubenswrapper[4784]: I1205 13:01:06.262113 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0090d182-b58b-4c0b-83b0-82ce94675e65-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 13:01:06 crc kubenswrapper[4784]: I1205 13:01:06.262165 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f2hmg\" (UniqueName: \"kubernetes.io/projected/0090d182-b58b-4c0b-83b0-82ce94675e65-kube-api-access-f2hmg\") on node \"crc\" DevicePath \"\"" Dec 05 13:01:06 crc kubenswrapper[4784]: I1205 13:01:06.262203 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0090d182-b58b-4c0b-83b0-82ce94675e65-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 13:01:06 crc kubenswrapper[4784]: I1205 13:01:06.262216 4784 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0090d182-b58b-4c0b-83b0-82ce94675e65-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 05 13:01:06 crc kubenswrapper[4784]: I1205 13:01:06.649419 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415661-ckq8x" event={"ID":"0090d182-b58b-4c0b-83b0-82ce94675e65","Type":"ContainerDied","Data":"f96dfcce3cb5dbb460b2330de5a3cea0da806fd2abee0fb8f59444fa431e3024"} Dec 05 13:01:06 crc kubenswrapper[4784]: I1205 13:01:06.649459 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f96dfcce3cb5dbb460b2330de5a3cea0da806fd2abee0fb8f59444fa431e3024" Dec 05 13:01:06 crc kubenswrapper[4784]: I1205 13:01:06.649459 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29415661-ckq8x" Dec 05 13:01:08 crc kubenswrapper[4784]: I1205 13:01:08.667548 4784 generic.go:334] "Generic (PLEG): container finished" podID="5f5f986e-311c-41da-aae4-18d6f3520749" containerID="39b432f386ae0f534623ec6ec0f3545428f1ce04973cf8f3a951e736d01d5086" exitCode=0 Dec 05 13:01:08 crc kubenswrapper[4784]: I1205 13:01:08.667626 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dbnnx" event={"ID":"5f5f986e-311c-41da-aae4-18d6f3520749","Type":"ContainerDied","Data":"39b432f386ae0f534623ec6ec0f3545428f1ce04973cf8f3a951e736d01d5086"} Dec 05 13:01:10 crc kubenswrapper[4784]: I1205 13:01:10.157511 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dbnnx" Dec 05 13:01:10 crc kubenswrapper[4784]: I1205 13:01:10.251797 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6xd2l\" (UniqueName: \"kubernetes.io/projected/5f5f986e-311c-41da-aae4-18d6f3520749-kube-api-access-6xd2l\") pod \"5f5f986e-311c-41da-aae4-18d6f3520749\" (UID: \"5f5f986e-311c-41da-aae4-18d6f3520749\") " Dec 05 13:01:10 crc kubenswrapper[4784]: I1205 13:01:10.251842 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f5f986e-311c-41da-aae4-18d6f3520749-inventory\") pod \"5f5f986e-311c-41da-aae4-18d6f3520749\" (UID: \"5f5f986e-311c-41da-aae4-18d6f3520749\") " Dec 05 13:01:10 crc kubenswrapper[4784]: I1205 13:01:10.251976 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5f5f986e-311c-41da-aae4-18d6f3520749-ssh-key\") pod \"5f5f986e-311c-41da-aae4-18d6f3520749\" (UID: \"5f5f986e-311c-41da-aae4-18d6f3520749\") " Dec 05 13:01:10 crc kubenswrapper[4784]: I1205 13:01:10.262341 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f5f986e-311c-41da-aae4-18d6f3520749-kube-api-access-6xd2l" (OuterVolumeSpecName: "kube-api-access-6xd2l") pod "5f5f986e-311c-41da-aae4-18d6f3520749" (UID: "5f5f986e-311c-41da-aae4-18d6f3520749"). InnerVolumeSpecName "kube-api-access-6xd2l". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:01:10 crc kubenswrapper[4784]: I1205 13:01:10.286277 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f5f986e-311c-41da-aae4-18d6f3520749-inventory" (OuterVolumeSpecName: "inventory") pod "5f5f986e-311c-41da-aae4-18d6f3520749" (UID: "5f5f986e-311c-41da-aae4-18d6f3520749"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:01:10 crc kubenswrapper[4784]: I1205 13:01:10.290524 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f5f986e-311c-41da-aae4-18d6f3520749-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5f5f986e-311c-41da-aae4-18d6f3520749" (UID: "5f5f986e-311c-41da-aae4-18d6f3520749"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:01:10 crc kubenswrapper[4784]: I1205 13:01:10.353855 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6xd2l\" (UniqueName: \"kubernetes.io/projected/5f5f986e-311c-41da-aae4-18d6f3520749-kube-api-access-6xd2l\") on node \"crc\" DevicePath \"\"" Dec 05 13:01:10 crc kubenswrapper[4784]: I1205 13:01:10.353887 4784 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f5f986e-311c-41da-aae4-18d6f3520749-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 13:01:10 crc kubenswrapper[4784]: I1205 13:01:10.353898 4784 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5f5f986e-311c-41da-aae4-18d6f3520749-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 13:01:10 crc kubenswrapper[4784]: I1205 13:01:10.691792 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dbnnx" event={"ID":"5f5f986e-311c-41da-aae4-18d6f3520749","Type":"ContainerDied","Data":"8e3ac455f3034f23f35bdde0842d7268792ae2825b9555cfdd5b37fdfe8f56a8"} Dec 05 13:01:10 crc kubenswrapper[4784]: I1205 13:01:10.691830 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8e3ac455f3034f23f35bdde0842d7268792ae2825b9555cfdd5b37fdfe8f56a8" Dec 05 13:01:10 crc kubenswrapper[4784]: I1205 13:01:10.691850 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dbnnx" Dec 05 13:01:10 crc kubenswrapper[4784]: I1205 13:01:10.788999 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5"] Dec 05 13:01:10 crc kubenswrapper[4784]: E1205 13:01:10.789446 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f5f986e-311c-41da-aae4-18d6f3520749" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 05 13:01:10 crc kubenswrapper[4784]: I1205 13:01:10.789478 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f5f986e-311c-41da-aae4-18d6f3520749" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 05 13:01:10 crc kubenswrapper[4784]: E1205 13:01:10.789490 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0090d182-b58b-4c0b-83b0-82ce94675e65" containerName="keystone-cron" Dec 05 13:01:10 crc kubenswrapper[4784]: I1205 13:01:10.789496 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="0090d182-b58b-4c0b-83b0-82ce94675e65" containerName="keystone-cron" Dec 05 13:01:10 crc kubenswrapper[4784]: I1205 13:01:10.789686 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="0090d182-b58b-4c0b-83b0-82ce94675e65" containerName="keystone-cron" Dec 05 13:01:10 crc kubenswrapper[4784]: I1205 13:01:10.789715 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f5f986e-311c-41da-aae4-18d6f3520749" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 05 13:01:10 crc kubenswrapper[4784]: I1205 13:01:10.790364 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" Dec 05 13:01:10 crc kubenswrapper[4784]: I1205 13:01:10.794764 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-lwfzg" Dec 05 13:01:10 crc kubenswrapper[4784]: I1205 13:01:10.795305 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Dec 05 13:01:10 crc kubenswrapper[4784]: I1205 13:01:10.795373 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 13:01:10 crc kubenswrapper[4784]: I1205 13:01:10.795376 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 13:01:10 crc kubenswrapper[4784]: I1205 13:01:10.795464 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Dec 05 13:01:10 crc kubenswrapper[4784]: I1205 13:01:10.795987 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Dec 05 13:01:10 crc kubenswrapper[4784]: I1205 13:01:10.796171 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 13:01:10 crc kubenswrapper[4784]: I1205 13:01:10.799911 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Dec 05 13:01:10 crc kubenswrapper[4784]: I1205 13:01:10.809671 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5"] Dec 05 13:01:10 crc kubenswrapper[4784]: I1205 13:01:10.965540 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" Dec 05 13:01:10 crc kubenswrapper[4784]: I1205 13:01:10.965596 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" Dec 05 13:01:10 crc kubenswrapper[4784]: I1205 13:01:10.965727 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" Dec 05 13:01:10 crc kubenswrapper[4784]: I1205 13:01:10.965804 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" Dec 05 13:01:10 crc kubenswrapper[4784]: I1205 13:01:10.966042 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-djn2w\" (UniqueName: \"kubernetes.io/projected/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-kube-api-access-djn2w\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" Dec 05 13:01:10 crc kubenswrapper[4784]: I1205 13:01:10.966096 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" Dec 05 13:01:10 crc kubenswrapper[4784]: I1205 13:01:10.966122 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" Dec 05 13:01:10 crc kubenswrapper[4784]: I1205 13:01:10.966244 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" Dec 05 13:01:10 crc kubenswrapper[4784]: I1205 13:01:10.966293 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" Dec 05 13:01:10 crc kubenswrapper[4784]: I1205 13:01:10.966336 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" Dec 05 13:01:10 crc kubenswrapper[4784]: I1205 13:01:10.966386 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-nova-combined-ca-bundle\") 
pod \"install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" Dec 05 13:01:10 crc kubenswrapper[4784]: I1205 13:01:10.966424 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" Dec 05 13:01:10 crc kubenswrapper[4784]: I1205 13:01:10.966459 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" Dec 05 13:01:10 crc kubenswrapper[4784]: I1205 13:01:10.966550 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" Dec 05 13:01:11 crc kubenswrapper[4784]: I1205 13:01:11.068527 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" Dec 05 13:01:11 crc kubenswrapper[4784]: I1205 13:01:11.068627 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" Dec 05 13:01:11 crc kubenswrapper[4784]: I1205 13:01:11.068674 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" Dec 05 13:01:11 crc kubenswrapper[4784]: I1205 13:01:11.068755 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-djn2w\" (UniqueName: \"kubernetes.io/projected/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-kube-api-access-djn2w\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" Dec 05 13:01:11 crc kubenswrapper[4784]: I1205 13:01:11.069261 4784 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" Dec 05 13:01:11 crc kubenswrapper[4784]: I1205 13:01:11.069292 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" Dec 05 13:01:11 crc kubenswrapper[4784]: I1205 13:01:11.069322 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" Dec 05 13:01:11 crc kubenswrapper[4784]: I1205 13:01:11.069340 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" Dec 05 13:01:11 crc kubenswrapper[4784]: I1205 13:01:11.069359 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" Dec 05 13:01:11 crc kubenswrapper[4784]: I1205 13:01:11.069382 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" Dec 05 13:01:11 crc kubenswrapper[4784]: I1205 13:01:11.069400 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" Dec 05 13:01:11 crc kubenswrapper[4784]: I1205 13:01:11.069418 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-bootstrap-combined-ca-bundle\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" Dec 05 13:01:11 crc kubenswrapper[4784]: I1205 13:01:11.069467 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" Dec 05 13:01:11 crc kubenswrapper[4784]: I1205 13:01:11.069496 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" Dec 05 13:01:11 crc kubenswrapper[4784]: I1205 13:01:11.074133 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" Dec 05 13:01:11 crc kubenswrapper[4784]: I1205 13:01:11.077991 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" Dec 05 13:01:11 crc kubenswrapper[4784]: I1205 13:01:11.078347 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" Dec 05 13:01:11 crc kubenswrapper[4784]: I1205 13:01:11.078405 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" Dec 05 13:01:11 crc kubenswrapper[4784]: I1205 13:01:11.079616 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" Dec 05 13:01:11 crc kubenswrapper[4784]: I1205 13:01:11.079708 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" Dec 05 13:01:11 crc kubenswrapper[4784]: I1205 13:01:11.079862 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" Dec 05 13:01:11 crc kubenswrapper[4784]: I1205 13:01:11.080169 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" Dec 05 13:01:11 crc kubenswrapper[4784]: I1205 13:01:11.080488 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" Dec 05 13:01:11 crc kubenswrapper[4784]: I1205 13:01:11.080737 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" Dec 05 13:01:11 crc kubenswrapper[4784]: I1205 13:01:11.082962 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" Dec 05 13:01:11 crc kubenswrapper[4784]: I1205 13:01:11.083069 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" Dec 05 13:01:11 crc kubenswrapper[4784]: I1205 13:01:11.084624 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5\" (UID: 
\"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" Dec 05 13:01:11 crc kubenswrapper[4784]: I1205 13:01:11.087339 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-djn2w\" (UniqueName: \"kubernetes.io/projected/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-kube-api-access-djn2w\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" Dec 05 13:01:11 crc kubenswrapper[4784]: I1205 13:01:11.120608 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" Dec 05 13:01:11 crc kubenswrapper[4784]: I1205 13:01:11.680854 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5"] Dec 05 13:01:11 crc kubenswrapper[4784]: I1205 13:01:11.713040 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" event={"ID":"8021b39a-1235-4fb9-8ef4-ae1ff51e7835","Type":"ContainerStarted","Data":"692a600c579a7665bb8aad07d1d04e637d41dc56f5ae01cca14041bd41403988"} Dec 05 13:01:12 crc kubenswrapper[4784]: I1205 13:01:12.726413 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" event={"ID":"8021b39a-1235-4fb9-8ef4-ae1ff51e7835","Type":"ContainerStarted","Data":"e1c0d9204370f1210fdca37a0e24f979159e8228d71ea9602033d9070029d47a"} Dec 05 13:01:12 crc kubenswrapper[4784]: I1205 13:01:12.760619 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" podStartSLOduration=2.335911981 podStartE2EDuration="2.760592079s" podCreationTimestamp="2025-12-05 13:01:10 +0000 UTC" firstStartedPulling="2025-12-05 13:01:11.688007333 +0000 UTC m=+2151.108074158" lastFinishedPulling="2025-12-05 13:01:12.112687401 +0000 UTC m=+2151.532754256" observedRunningTime="2025-12-05 13:01:12.750387369 +0000 UTC m=+2152.170454244" watchObservedRunningTime="2025-12-05 13:01:12.760592079 +0000 UTC m=+2152.180658934" Dec 05 13:01:15 crc kubenswrapper[4784]: I1205 13:01:15.932235 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-k7ng7"] Dec 05 13:01:15 crc kubenswrapper[4784]: I1205 13:01:15.941827 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-k7ng7" Dec 05 13:01:15 crc kubenswrapper[4784]: I1205 13:01:15.987073 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-k7ng7"] Dec 05 13:01:16 crc kubenswrapper[4784]: I1205 13:01:16.069658 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7eb8f680-76b0-46e9-a523-61753ddc5e13-catalog-content\") pod \"certified-operators-k7ng7\" (UID: \"7eb8f680-76b0-46e9-a523-61753ddc5e13\") " pod="openshift-marketplace/certified-operators-k7ng7" Dec 05 13:01:16 crc kubenswrapper[4784]: I1205 13:01:16.069787 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nt88z\" (UniqueName: \"kubernetes.io/projected/7eb8f680-76b0-46e9-a523-61753ddc5e13-kube-api-access-nt88z\") pod \"certified-operators-k7ng7\" (UID: \"7eb8f680-76b0-46e9-a523-61753ddc5e13\") " pod="openshift-marketplace/certified-operators-k7ng7" Dec 05 13:01:16 crc kubenswrapper[4784]: I1205 13:01:16.069840 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7eb8f680-76b0-46e9-a523-61753ddc5e13-utilities\") pod \"certified-operators-k7ng7\" (UID: \"7eb8f680-76b0-46e9-a523-61753ddc5e13\") " pod="openshift-marketplace/certified-operators-k7ng7" Dec 05 13:01:16 crc kubenswrapper[4784]: I1205 13:01:16.172561 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7eb8f680-76b0-46e9-a523-61753ddc5e13-catalog-content\") pod \"certified-operators-k7ng7\" (UID: \"7eb8f680-76b0-46e9-a523-61753ddc5e13\") " pod="openshift-marketplace/certified-operators-k7ng7" Dec 05 13:01:16 crc kubenswrapper[4784]: I1205 13:01:16.172703 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nt88z\" (UniqueName: \"kubernetes.io/projected/7eb8f680-76b0-46e9-a523-61753ddc5e13-kube-api-access-nt88z\") pod \"certified-operators-k7ng7\" (UID: \"7eb8f680-76b0-46e9-a523-61753ddc5e13\") " pod="openshift-marketplace/certified-operators-k7ng7" Dec 05 13:01:16 crc kubenswrapper[4784]: I1205 13:01:16.172738 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7eb8f680-76b0-46e9-a523-61753ddc5e13-utilities\") pod \"certified-operators-k7ng7\" (UID: \"7eb8f680-76b0-46e9-a523-61753ddc5e13\") " pod="openshift-marketplace/certified-operators-k7ng7" Dec 05 13:01:16 crc kubenswrapper[4784]: I1205 13:01:16.173077 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7eb8f680-76b0-46e9-a523-61753ddc5e13-catalog-content\") pod \"certified-operators-k7ng7\" (UID: \"7eb8f680-76b0-46e9-a523-61753ddc5e13\") " pod="openshift-marketplace/certified-operators-k7ng7" Dec 05 13:01:16 crc kubenswrapper[4784]: I1205 13:01:16.173167 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7eb8f680-76b0-46e9-a523-61753ddc5e13-utilities\") pod \"certified-operators-k7ng7\" (UID: \"7eb8f680-76b0-46e9-a523-61753ddc5e13\") " pod="openshift-marketplace/certified-operators-k7ng7" Dec 05 13:01:16 crc kubenswrapper[4784]: I1205 13:01:16.192110 4784 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-nt88z\" (UniqueName: \"kubernetes.io/projected/7eb8f680-76b0-46e9-a523-61753ddc5e13-kube-api-access-nt88z\") pod \"certified-operators-k7ng7\" (UID: \"7eb8f680-76b0-46e9-a523-61753ddc5e13\") " pod="openshift-marketplace/certified-operators-k7ng7" Dec 05 13:01:16 crc kubenswrapper[4784]: I1205 13:01:16.275415 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-k7ng7" Dec 05 13:01:16 crc kubenswrapper[4784]: I1205 13:01:16.762740 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-k7ng7"] Dec 05 13:01:17 crc kubenswrapper[4784]: I1205 13:01:17.778913 4784 generic.go:334] "Generic (PLEG): container finished" podID="7eb8f680-76b0-46e9-a523-61753ddc5e13" containerID="6370294e3c3a0f71a3e843cb303fead883ffb7bf646605260b3ac4ddd961f737" exitCode=0 Dec 05 13:01:17 crc kubenswrapper[4784]: I1205 13:01:17.778982 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k7ng7" event={"ID":"7eb8f680-76b0-46e9-a523-61753ddc5e13","Type":"ContainerDied","Data":"6370294e3c3a0f71a3e843cb303fead883ffb7bf646605260b3ac4ddd961f737"} Dec 05 13:01:17 crc kubenswrapper[4784]: I1205 13:01:17.779359 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k7ng7" event={"ID":"7eb8f680-76b0-46e9-a523-61753ddc5e13","Type":"ContainerStarted","Data":"d519c2eec98a613773842676ee360328a5b65aef8a08fb3bb57f2ed29e45e0d7"} Dec 05 13:01:18 crc kubenswrapper[4784]: I1205 13:01:18.791019 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k7ng7" event={"ID":"7eb8f680-76b0-46e9-a523-61753ddc5e13","Type":"ContainerStarted","Data":"741bcf3f10a0258201ee78c46dcc7cde60801a73ad09d443db6923a0b81b86bf"} Dec 05 13:01:19 crc kubenswrapper[4784]: I1205 13:01:19.807491 4784 generic.go:334] "Generic (PLEG): container finished" podID="7eb8f680-76b0-46e9-a523-61753ddc5e13" containerID="741bcf3f10a0258201ee78c46dcc7cde60801a73ad09d443db6923a0b81b86bf" exitCode=0 Dec 05 13:01:19 crc kubenswrapper[4784]: I1205 13:01:19.807908 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k7ng7" event={"ID":"7eb8f680-76b0-46e9-a523-61753ddc5e13","Type":"ContainerDied","Data":"741bcf3f10a0258201ee78c46dcc7cde60801a73ad09d443db6923a0b81b86bf"} Dec 05 13:01:20 crc kubenswrapper[4784]: I1205 13:01:20.821492 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k7ng7" event={"ID":"7eb8f680-76b0-46e9-a523-61753ddc5e13","Type":"ContainerStarted","Data":"952203191ba730da6e64b52fa7feb1cf3d0e7fb97f518c3b3d2852fbadf22d14"} Dec 05 13:01:20 crc kubenswrapper[4784]: I1205 13:01:20.849945 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-k7ng7" podStartSLOduration=3.40328246 podStartE2EDuration="5.849919227s" podCreationTimestamp="2025-12-05 13:01:15 +0000 UTC" firstStartedPulling="2025-12-05 13:01:17.781389329 +0000 UTC m=+2157.201456144" lastFinishedPulling="2025-12-05 13:01:20.228026086 +0000 UTC m=+2159.648092911" observedRunningTime="2025-12-05 13:01:20.844621361 +0000 UTC m=+2160.264688206" watchObservedRunningTime="2025-12-05 13:01:20.849919227 +0000 UTC m=+2160.269986052" Dec 05 13:01:26 crc kubenswrapper[4784]: I1205 13:01:26.275658 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/certified-operators-k7ng7" Dec 05 13:01:26 crc kubenswrapper[4784]: I1205 13:01:26.276234 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-k7ng7" Dec 05 13:01:26 crc kubenswrapper[4784]: I1205 13:01:26.320821 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-k7ng7" Dec 05 13:01:26 crc kubenswrapper[4784]: I1205 13:01:26.930944 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-k7ng7" Dec 05 13:01:26 crc kubenswrapper[4784]: I1205 13:01:26.979269 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-k7ng7"] Dec 05 13:01:28 crc kubenswrapper[4784]: I1205 13:01:28.900117 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-k7ng7" podUID="7eb8f680-76b0-46e9-a523-61753ddc5e13" containerName="registry-server" containerID="cri-o://952203191ba730da6e64b52fa7feb1cf3d0e7fb97f518c3b3d2852fbadf22d14" gracePeriod=2 Dec 05 13:01:29 crc kubenswrapper[4784]: I1205 13:01:29.344741 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-k7ng7" Dec 05 13:01:29 crc kubenswrapper[4784]: I1205 13:01:29.457622 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7eb8f680-76b0-46e9-a523-61753ddc5e13-utilities" (OuterVolumeSpecName: "utilities") pod "7eb8f680-76b0-46e9-a523-61753ddc5e13" (UID: "7eb8f680-76b0-46e9-a523-61753ddc5e13"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:01:29 crc kubenswrapper[4784]: I1205 13:01:29.461421 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7eb8f680-76b0-46e9-a523-61753ddc5e13-utilities\") pod \"7eb8f680-76b0-46e9-a523-61753ddc5e13\" (UID: \"7eb8f680-76b0-46e9-a523-61753ddc5e13\") " Dec 05 13:01:29 crc kubenswrapper[4784]: I1205 13:01:29.461585 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nt88z\" (UniqueName: \"kubernetes.io/projected/7eb8f680-76b0-46e9-a523-61753ddc5e13-kube-api-access-nt88z\") pod \"7eb8f680-76b0-46e9-a523-61753ddc5e13\" (UID: \"7eb8f680-76b0-46e9-a523-61753ddc5e13\") " Dec 05 13:01:29 crc kubenswrapper[4784]: I1205 13:01:29.461717 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7eb8f680-76b0-46e9-a523-61753ddc5e13-catalog-content\") pod \"7eb8f680-76b0-46e9-a523-61753ddc5e13\" (UID: \"7eb8f680-76b0-46e9-a523-61753ddc5e13\") " Dec 05 13:01:29 crc kubenswrapper[4784]: I1205 13:01:29.463318 4784 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7eb8f680-76b0-46e9-a523-61753ddc5e13-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 13:01:29 crc kubenswrapper[4784]: I1205 13:01:29.469908 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7eb8f680-76b0-46e9-a523-61753ddc5e13-kube-api-access-nt88z" (OuterVolumeSpecName: "kube-api-access-nt88z") pod "7eb8f680-76b0-46e9-a523-61753ddc5e13" (UID: "7eb8f680-76b0-46e9-a523-61753ddc5e13"). InnerVolumeSpecName "kube-api-access-nt88z". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:01:29 crc kubenswrapper[4784]: I1205 13:01:29.517045 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7eb8f680-76b0-46e9-a523-61753ddc5e13-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7eb8f680-76b0-46e9-a523-61753ddc5e13" (UID: "7eb8f680-76b0-46e9-a523-61753ddc5e13"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:01:29 crc kubenswrapper[4784]: I1205 13:01:29.576846 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 13:01:29 crc kubenswrapper[4784]: I1205 13:01:29.576901 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 13:01:29 crc kubenswrapper[4784]: I1205 13:01:29.577205 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nt88z\" (UniqueName: \"kubernetes.io/projected/7eb8f680-76b0-46e9-a523-61753ddc5e13-kube-api-access-nt88z\") on node \"crc\" DevicePath \"\"" Dec 05 13:01:29 crc kubenswrapper[4784]: I1205 13:01:29.577236 4784 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7eb8f680-76b0-46e9-a523-61753ddc5e13-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 13:01:29 crc kubenswrapper[4784]: I1205 13:01:29.911475 4784 generic.go:334] "Generic (PLEG): container finished" podID="7eb8f680-76b0-46e9-a523-61753ddc5e13" containerID="952203191ba730da6e64b52fa7feb1cf3d0e7fb97f518c3b3d2852fbadf22d14" exitCode=0 Dec 05 13:01:29 crc kubenswrapper[4784]: I1205 13:01:29.911540 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k7ng7" event={"ID":"7eb8f680-76b0-46e9-a523-61753ddc5e13","Type":"ContainerDied","Data":"952203191ba730da6e64b52fa7feb1cf3d0e7fb97f518c3b3d2852fbadf22d14"} Dec 05 13:01:29 crc kubenswrapper[4784]: I1205 13:01:29.911615 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k7ng7" event={"ID":"7eb8f680-76b0-46e9-a523-61753ddc5e13","Type":"ContainerDied","Data":"d519c2eec98a613773842676ee360328a5b65aef8a08fb3bb57f2ed29e45e0d7"} Dec 05 13:01:29 crc kubenswrapper[4784]: I1205 13:01:29.911645 4784 scope.go:117] "RemoveContainer" containerID="952203191ba730da6e64b52fa7feb1cf3d0e7fb97f518c3b3d2852fbadf22d14" Dec 05 13:01:29 crc kubenswrapper[4784]: I1205 13:01:29.912871 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-k7ng7" Dec 05 13:01:29 crc kubenswrapper[4784]: I1205 13:01:29.930600 4784 scope.go:117] "RemoveContainer" containerID="741bcf3f10a0258201ee78c46dcc7cde60801a73ad09d443db6923a0b81b86bf" Dec 05 13:01:29 crc kubenswrapper[4784]: I1205 13:01:29.950804 4784 scope.go:117] "RemoveContainer" containerID="6370294e3c3a0f71a3e843cb303fead883ffb7bf646605260b3ac4ddd961f737" Dec 05 13:01:29 crc kubenswrapper[4784]: I1205 13:01:29.965644 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-k7ng7"] Dec 05 13:01:29 crc kubenswrapper[4784]: I1205 13:01:29.977482 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-k7ng7"] Dec 05 13:01:30 crc kubenswrapper[4784]: I1205 13:01:30.016602 4784 scope.go:117] "RemoveContainer" containerID="952203191ba730da6e64b52fa7feb1cf3d0e7fb97f518c3b3d2852fbadf22d14" Dec 05 13:01:30 crc kubenswrapper[4784]: E1205 13:01:30.017110 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"952203191ba730da6e64b52fa7feb1cf3d0e7fb97f518c3b3d2852fbadf22d14\": container with ID starting with 952203191ba730da6e64b52fa7feb1cf3d0e7fb97f518c3b3d2852fbadf22d14 not found: ID does not exist" containerID="952203191ba730da6e64b52fa7feb1cf3d0e7fb97f518c3b3d2852fbadf22d14" Dec 05 13:01:30 crc kubenswrapper[4784]: I1205 13:01:30.017256 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"952203191ba730da6e64b52fa7feb1cf3d0e7fb97f518c3b3d2852fbadf22d14"} err="failed to get container status \"952203191ba730da6e64b52fa7feb1cf3d0e7fb97f518c3b3d2852fbadf22d14\": rpc error: code = NotFound desc = could not find container \"952203191ba730da6e64b52fa7feb1cf3d0e7fb97f518c3b3d2852fbadf22d14\": container with ID starting with 952203191ba730da6e64b52fa7feb1cf3d0e7fb97f518c3b3d2852fbadf22d14 not found: ID does not exist" Dec 05 13:01:30 crc kubenswrapper[4784]: I1205 13:01:30.017352 4784 scope.go:117] "RemoveContainer" containerID="741bcf3f10a0258201ee78c46dcc7cde60801a73ad09d443db6923a0b81b86bf" Dec 05 13:01:30 crc kubenswrapper[4784]: E1205 13:01:30.017888 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"741bcf3f10a0258201ee78c46dcc7cde60801a73ad09d443db6923a0b81b86bf\": container with ID starting with 741bcf3f10a0258201ee78c46dcc7cde60801a73ad09d443db6923a0b81b86bf not found: ID does not exist" containerID="741bcf3f10a0258201ee78c46dcc7cde60801a73ad09d443db6923a0b81b86bf" Dec 05 13:01:30 crc kubenswrapper[4784]: I1205 13:01:30.017987 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"741bcf3f10a0258201ee78c46dcc7cde60801a73ad09d443db6923a0b81b86bf"} err="failed to get container status \"741bcf3f10a0258201ee78c46dcc7cde60801a73ad09d443db6923a0b81b86bf\": rpc error: code = NotFound desc = could not find container \"741bcf3f10a0258201ee78c46dcc7cde60801a73ad09d443db6923a0b81b86bf\": container with ID starting with 741bcf3f10a0258201ee78c46dcc7cde60801a73ad09d443db6923a0b81b86bf not found: ID does not exist" Dec 05 13:01:30 crc kubenswrapper[4784]: I1205 13:01:30.018055 4784 scope.go:117] "RemoveContainer" containerID="6370294e3c3a0f71a3e843cb303fead883ffb7bf646605260b3ac4ddd961f737" Dec 05 13:01:30 crc kubenswrapper[4784]: E1205 13:01:30.019356 4784 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"6370294e3c3a0f71a3e843cb303fead883ffb7bf646605260b3ac4ddd961f737\": container with ID starting with 6370294e3c3a0f71a3e843cb303fead883ffb7bf646605260b3ac4ddd961f737 not found: ID does not exist" containerID="6370294e3c3a0f71a3e843cb303fead883ffb7bf646605260b3ac4ddd961f737" Dec 05 13:01:30 crc kubenswrapper[4784]: I1205 13:01:30.019400 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6370294e3c3a0f71a3e843cb303fead883ffb7bf646605260b3ac4ddd961f737"} err="failed to get container status \"6370294e3c3a0f71a3e843cb303fead883ffb7bf646605260b3ac4ddd961f737\": rpc error: code = NotFound desc = could not find container \"6370294e3c3a0f71a3e843cb303fead883ffb7bf646605260b3ac4ddd961f737\": container with ID starting with 6370294e3c3a0f71a3e843cb303fead883ffb7bf646605260b3ac4ddd961f737 not found: ID does not exist" Dec 05 13:01:31 crc kubenswrapper[4784]: I1205 13:01:31.017874 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7eb8f680-76b0-46e9-a523-61753ddc5e13" path="/var/lib/kubelet/pods/7eb8f680-76b0-46e9-a523-61753ddc5e13/volumes" Dec 05 13:01:52 crc kubenswrapper[4784]: I1205 13:01:52.160262 4784 generic.go:334] "Generic (PLEG): container finished" podID="8021b39a-1235-4fb9-8ef4-ae1ff51e7835" containerID="e1c0d9204370f1210fdca37a0e24f979159e8228d71ea9602033d9070029d47a" exitCode=0 Dec 05 13:01:52 crc kubenswrapper[4784]: I1205 13:01:52.160378 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" event={"ID":"8021b39a-1235-4fb9-8ef4-ae1ff51e7835","Type":"ContainerDied","Data":"e1c0d9204370f1210fdca37a0e24f979159e8228d71ea9602033d9070029d47a"} Dec 05 13:01:53 crc kubenswrapper[4784]: I1205 13:01:53.624414 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" Dec 05 13:01:53 crc kubenswrapper[4784]: I1205 13:01:53.676214 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-nova-combined-ca-bundle\") pod \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " Dec 05 13:01:53 crc kubenswrapper[4784]: I1205 13:01:53.676296 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-inventory\") pod \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " Dec 05 13:01:53 crc kubenswrapper[4784]: I1205 13:01:53.676642 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-djn2w\" (UniqueName: \"kubernetes.io/projected/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-kube-api-access-djn2w\") pod \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " Dec 05 13:01:53 crc kubenswrapper[4784]: I1205 13:01:53.676691 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " Dec 05 13:01:53 crc kubenswrapper[4784]: I1205 13:01:53.676738 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " Dec 05 13:01:53 crc kubenswrapper[4784]: I1205 13:01:53.676824 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-libvirt-combined-ca-bundle\") pod \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " Dec 05 13:01:53 crc kubenswrapper[4784]: I1205 13:01:53.676871 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-telemetry-combined-ca-bundle\") pod \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " Dec 05 13:01:53 crc kubenswrapper[4784]: I1205 13:01:53.676937 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " Dec 05 13:01:53 crc kubenswrapper[4784]: I1205 13:01:53.676988 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-openstack-edpm-ipam-ovn-default-certs-0\") pod \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\" (UID: 
\"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " Dec 05 13:01:53 crc kubenswrapper[4784]: I1205 13:01:53.677048 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-ovn-combined-ca-bundle\") pod \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " Dec 05 13:01:53 crc kubenswrapper[4784]: I1205 13:01:53.677141 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-repo-setup-combined-ca-bundle\") pod \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " Dec 05 13:01:53 crc kubenswrapper[4784]: I1205 13:01:53.677182 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-ssh-key\") pod \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " Dec 05 13:01:53 crc kubenswrapper[4784]: I1205 13:01:53.677244 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-bootstrap-combined-ca-bundle\") pod \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " Dec 05 13:01:53 crc kubenswrapper[4784]: I1205 13:01:53.677299 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-neutron-metadata-combined-ca-bundle\") pod \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\" (UID: \"8021b39a-1235-4fb9-8ef4-ae1ff51e7835\") " Dec 05 13:01:53 crc kubenswrapper[4784]: I1205 13:01:53.682945 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "8021b39a-1235-4fb9-8ef4-ae1ff51e7835" (UID: "8021b39a-1235-4fb9-8ef4-ae1ff51e7835"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:01:53 crc kubenswrapper[4784]: I1205 13:01:53.685057 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "8021b39a-1235-4fb9-8ef4-ae1ff51e7835" (UID: "8021b39a-1235-4fb9-8ef4-ae1ff51e7835"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:01:53 crc kubenswrapper[4784]: I1205 13:01:53.685076 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "8021b39a-1235-4fb9-8ef4-ae1ff51e7835" (UID: "8021b39a-1235-4fb9-8ef4-ae1ff51e7835"). InnerVolumeSpecName "ovn-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:01:53 crc kubenswrapper[4784]: I1205 13:01:53.688241 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "8021b39a-1235-4fb9-8ef4-ae1ff51e7835" (UID: "8021b39a-1235-4fb9-8ef4-ae1ff51e7835"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:01:53 crc kubenswrapper[4784]: I1205 13:01:53.689442 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "8021b39a-1235-4fb9-8ef4-ae1ff51e7835" (UID: "8021b39a-1235-4fb9-8ef4-ae1ff51e7835"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:01:53 crc kubenswrapper[4784]: I1205 13:01:53.689911 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "8021b39a-1235-4fb9-8ef4-ae1ff51e7835" (UID: "8021b39a-1235-4fb9-8ef4-ae1ff51e7835"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:01:53 crc kubenswrapper[4784]: I1205 13:01:53.694763 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "8021b39a-1235-4fb9-8ef4-ae1ff51e7835" (UID: "8021b39a-1235-4fb9-8ef4-ae1ff51e7835"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:01:53 crc kubenswrapper[4784]: I1205 13:01:53.694867 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "8021b39a-1235-4fb9-8ef4-ae1ff51e7835" (UID: "8021b39a-1235-4fb9-8ef4-ae1ff51e7835"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:01:53 crc kubenswrapper[4784]: I1205 13:01:53.694916 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "8021b39a-1235-4fb9-8ef4-ae1ff51e7835" (UID: "8021b39a-1235-4fb9-8ef4-ae1ff51e7835"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:01:53 crc kubenswrapper[4784]: I1205 13:01:53.696456 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "8021b39a-1235-4fb9-8ef4-ae1ff51e7835" (UID: "8021b39a-1235-4fb9-8ef4-ae1ff51e7835"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:01:53 crc kubenswrapper[4784]: I1205 13:01:53.696920 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "8021b39a-1235-4fb9-8ef4-ae1ff51e7835" (UID: "8021b39a-1235-4fb9-8ef4-ae1ff51e7835"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:01:53 crc kubenswrapper[4784]: I1205 13:01:53.697956 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-kube-api-access-djn2w" (OuterVolumeSpecName: "kube-api-access-djn2w") pod "8021b39a-1235-4fb9-8ef4-ae1ff51e7835" (UID: "8021b39a-1235-4fb9-8ef4-ae1ff51e7835"). InnerVolumeSpecName "kube-api-access-djn2w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:01:53 crc kubenswrapper[4784]: I1205 13:01:53.763901 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8021b39a-1235-4fb9-8ef4-ae1ff51e7835" (UID: "8021b39a-1235-4fb9-8ef4-ae1ff51e7835"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:01:53 crc kubenswrapper[4784]: I1205 13:01:53.765693 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-inventory" (OuterVolumeSpecName: "inventory") pod "8021b39a-1235-4fb9-8ef4-ae1ff51e7835" (UID: "8021b39a-1235-4fb9-8ef4-ae1ff51e7835"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:01:53 crc kubenswrapper[4784]: I1205 13:01:53.779401 4784 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 13:01:53 crc kubenswrapper[4784]: I1205 13:01:53.779436 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-djn2w\" (UniqueName: \"kubernetes.io/projected/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-kube-api-access-djn2w\") on node \"crc\" DevicePath \"\"" Dec 05 13:01:53 crc kubenswrapper[4784]: I1205 13:01:53.779453 4784 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 05 13:01:53 crc kubenswrapper[4784]: I1205 13:01:53.779469 4784 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 05 13:01:53 crc kubenswrapper[4784]: I1205 13:01:53.779487 4784 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 13:01:53 crc kubenswrapper[4784]: I1205 13:01:53.779500 4784 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 13:01:53 crc kubenswrapper[4784]: I1205 13:01:53.779513 4784 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 05 13:01:53 crc kubenswrapper[4784]: I1205 13:01:53.779526 4784 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 05 13:01:53 crc kubenswrapper[4784]: I1205 13:01:53.779562 4784 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 13:01:53 crc kubenswrapper[4784]: I1205 13:01:53.779575 4784 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 13:01:53 crc kubenswrapper[4784]: I1205 13:01:53.779587 4784 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 13:01:53 crc kubenswrapper[4784]: I1205 13:01:53.779600 4784 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 13:01:53 crc kubenswrapper[4784]: I1205 13:01:53.779612 4784 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 13:01:53 crc kubenswrapper[4784]: I1205 13:01:53.779623 4784 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8021b39a-1235-4fb9-8ef4-ae1ff51e7835-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 13:01:54 crc kubenswrapper[4784]: I1205 13:01:54.184718 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" event={"ID":"8021b39a-1235-4fb9-8ef4-ae1ff51e7835","Type":"ContainerDied","Data":"692a600c579a7665bb8aad07d1d04e637d41dc56f5ae01cca14041bd41403988"} Dec 05 13:01:54 crc kubenswrapper[4784]: I1205 13:01:54.184982 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="692a600c579a7665bb8aad07d1d04e637d41dc56f5ae01cca14041bd41403988" Dec 05 13:01:54 crc kubenswrapper[4784]: I1205 13:01:54.185013 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5" Dec 05 13:01:54 crc kubenswrapper[4784]: I1205 13:01:54.346853 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhm79"] Dec 05 13:01:54 crc kubenswrapper[4784]: E1205 13:01:54.347335 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8021b39a-1235-4fb9-8ef4-ae1ff51e7835" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 05 13:01:54 crc kubenswrapper[4784]: I1205 13:01:54.347361 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="8021b39a-1235-4fb9-8ef4-ae1ff51e7835" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 05 13:01:54 crc kubenswrapper[4784]: E1205 13:01:54.347395 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7eb8f680-76b0-46e9-a523-61753ddc5e13" containerName="extract-content" Dec 05 13:01:54 crc kubenswrapper[4784]: I1205 13:01:54.347405 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="7eb8f680-76b0-46e9-a523-61753ddc5e13" containerName="extract-content" Dec 05 13:01:54 crc kubenswrapper[4784]: E1205 13:01:54.347424 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7eb8f680-76b0-46e9-a523-61753ddc5e13" containerName="extract-utilities" Dec 05 13:01:54 crc kubenswrapper[4784]: I1205 13:01:54.347432 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="7eb8f680-76b0-46e9-a523-61753ddc5e13" containerName="extract-utilities" Dec 05 13:01:54 crc kubenswrapper[4784]: E1205 13:01:54.347449 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7eb8f680-76b0-46e9-a523-61753ddc5e13" containerName="registry-server" Dec 05 13:01:54 crc kubenswrapper[4784]: I1205 13:01:54.347456 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="7eb8f680-76b0-46e9-a523-61753ddc5e13" containerName="registry-server" Dec 05 13:01:54 crc kubenswrapper[4784]: I1205 13:01:54.347674 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="8021b39a-1235-4fb9-8ef4-ae1ff51e7835" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 05 13:01:54 crc kubenswrapper[4784]: I1205 13:01:54.347707 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="7eb8f680-76b0-46e9-a523-61753ddc5e13" containerName="registry-server" Dec 05 13:01:54 crc kubenswrapper[4784]: I1205 13:01:54.348502 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhm79" Dec 05 13:01:54 crc kubenswrapper[4784]: I1205 13:01:54.353164 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Dec 05 13:01:54 crc kubenswrapper[4784]: I1205 13:01:54.353174 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-lwfzg" Dec 05 13:01:54 crc kubenswrapper[4784]: I1205 13:01:54.353331 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 13:01:54 crc kubenswrapper[4784]: I1205 13:01:54.353177 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 13:01:54 crc kubenswrapper[4784]: I1205 13:01:54.354050 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 13:01:54 crc kubenswrapper[4784]: I1205 13:01:54.363482 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhm79"] Dec 05 13:01:54 crc kubenswrapper[4784]: I1205 13:01:54.389662 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8014c4e6-3539-4d7f-95c3-bb37c4a1e08e-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qhm79\" (UID: \"8014c4e6-3539-4d7f-95c3-bb37c4a1e08e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhm79" Dec 05 13:01:54 crc kubenswrapper[4784]: I1205 13:01:54.389754 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/8014c4e6-3539-4d7f-95c3-bb37c4a1e08e-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qhm79\" (UID: \"8014c4e6-3539-4d7f-95c3-bb37c4a1e08e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhm79" Dec 05 13:01:54 crc kubenswrapper[4784]: I1205 13:01:54.389973 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8014c4e6-3539-4d7f-95c3-bb37c4a1e08e-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qhm79\" (UID: \"8014c4e6-3539-4d7f-95c3-bb37c4a1e08e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhm79" Dec 05 13:01:54 crc kubenswrapper[4784]: I1205 13:01:54.390007 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sgbtt\" (UniqueName: \"kubernetes.io/projected/8014c4e6-3539-4d7f-95c3-bb37c4a1e08e-kube-api-access-sgbtt\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qhm79\" (UID: \"8014c4e6-3539-4d7f-95c3-bb37c4a1e08e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhm79" Dec 05 13:01:54 crc kubenswrapper[4784]: I1205 13:01:54.390068 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8014c4e6-3539-4d7f-95c3-bb37c4a1e08e-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qhm79\" (UID: \"8014c4e6-3539-4d7f-95c3-bb37c4a1e08e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhm79" Dec 05 13:01:54 crc kubenswrapper[4784]: I1205 13:01:54.492091 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/8014c4e6-3539-4d7f-95c3-bb37c4a1e08e-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qhm79\" (UID: \"8014c4e6-3539-4d7f-95c3-bb37c4a1e08e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhm79" Dec 05 13:01:54 crc kubenswrapper[4784]: I1205 13:01:54.492230 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8014c4e6-3539-4d7f-95c3-bb37c4a1e08e-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qhm79\" (UID: \"8014c4e6-3539-4d7f-95c3-bb37c4a1e08e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhm79" Dec 05 13:01:54 crc kubenswrapper[4784]: I1205 13:01:54.492261 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/8014c4e6-3539-4d7f-95c3-bb37c4a1e08e-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qhm79\" (UID: \"8014c4e6-3539-4d7f-95c3-bb37c4a1e08e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhm79" Dec 05 13:01:54 crc kubenswrapper[4784]: I1205 13:01:54.492375 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8014c4e6-3539-4d7f-95c3-bb37c4a1e08e-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qhm79\" (UID: \"8014c4e6-3539-4d7f-95c3-bb37c4a1e08e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhm79" Dec 05 13:01:54 crc kubenswrapper[4784]: I1205 13:01:54.492402 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sgbtt\" (UniqueName: \"kubernetes.io/projected/8014c4e6-3539-4d7f-95c3-bb37c4a1e08e-kube-api-access-sgbtt\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qhm79\" (UID: \"8014c4e6-3539-4d7f-95c3-bb37c4a1e08e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhm79" Dec 05 13:01:54 crc kubenswrapper[4784]: I1205 13:01:54.494398 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/8014c4e6-3539-4d7f-95c3-bb37c4a1e08e-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qhm79\" (UID: \"8014c4e6-3539-4d7f-95c3-bb37c4a1e08e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhm79" Dec 05 13:01:54 crc kubenswrapper[4784]: I1205 13:01:54.497394 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8014c4e6-3539-4d7f-95c3-bb37c4a1e08e-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qhm79\" (UID: \"8014c4e6-3539-4d7f-95c3-bb37c4a1e08e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhm79" Dec 05 13:01:54 crc kubenswrapper[4784]: I1205 13:01:54.498487 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8014c4e6-3539-4d7f-95c3-bb37c4a1e08e-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qhm79\" (UID: \"8014c4e6-3539-4d7f-95c3-bb37c4a1e08e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhm79" Dec 05 13:01:54 crc kubenswrapper[4784]: I1205 13:01:54.499104 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8014c4e6-3539-4d7f-95c3-bb37c4a1e08e-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qhm79\" (UID: \"8014c4e6-3539-4d7f-95c3-bb37c4a1e08e\") 
" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhm79" Dec 05 13:01:54 crc kubenswrapper[4784]: I1205 13:01:54.514524 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sgbtt\" (UniqueName: \"kubernetes.io/projected/8014c4e6-3539-4d7f-95c3-bb37c4a1e08e-kube-api-access-sgbtt\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qhm79\" (UID: \"8014c4e6-3539-4d7f-95c3-bb37c4a1e08e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhm79" Dec 05 13:01:54 crc kubenswrapper[4784]: I1205 13:01:54.669691 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhm79" Dec 05 13:01:55 crc kubenswrapper[4784]: I1205 13:01:55.232067 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhm79"] Dec 05 13:01:55 crc kubenswrapper[4784]: W1205 13:01:55.235903 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8014c4e6_3539_4d7f_95c3_bb37c4a1e08e.slice/crio-b6e3a865730a23e8dde653769b92ae01ace6ee51bea7cad96aeee2418b415244 WatchSource:0}: Error finding container b6e3a865730a23e8dde653769b92ae01ace6ee51bea7cad96aeee2418b415244: Status 404 returned error can't find the container with id b6e3a865730a23e8dde653769b92ae01ace6ee51bea7cad96aeee2418b415244 Dec 05 13:01:56 crc kubenswrapper[4784]: I1205 13:01:56.210622 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhm79" event={"ID":"8014c4e6-3539-4d7f-95c3-bb37c4a1e08e","Type":"ContainerStarted","Data":"c02ac678a404da6605a91a8f8185f3cc3da818f820416e8d431d521ef077b640"} Dec 05 13:01:56 crc kubenswrapper[4784]: I1205 13:01:56.210953 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhm79" event={"ID":"8014c4e6-3539-4d7f-95c3-bb37c4a1e08e","Type":"ContainerStarted","Data":"b6e3a865730a23e8dde653769b92ae01ace6ee51bea7cad96aeee2418b415244"} Dec 05 13:01:56 crc kubenswrapper[4784]: I1205 13:01:56.229705 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhm79" podStartSLOduration=1.6179353029999999 podStartE2EDuration="2.229684307s" podCreationTimestamp="2025-12-05 13:01:54 +0000 UTC" firstStartedPulling="2025-12-05 13:01:55.238407711 +0000 UTC m=+2194.658474526" lastFinishedPulling="2025-12-05 13:01:55.850156705 +0000 UTC m=+2195.270223530" observedRunningTime="2025-12-05 13:01:56.226789097 +0000 UTC m=+2195.646855942" watchObservedRunningTime="2025-12-05 13:01:56.229684307 +0000 UTC m=+2195.649751142" Dec 05 13:01:59 crc kubenswrapper[4784]: I1205 13:01:59.572306 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 13:01:59 crc kubenswrapper[4784]: I1205 13:01:59.572666 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 13:02:29 crc kubenswrapper[4784]: I1205 13:02:29.572033 4784 
patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 13:02:29 crc kubenswrapper[4784]: I1205 13:02:29.572614 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 13:02:29 crc kubenswrapper[4784]: I1205 13:02:29.572654 4784 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" Dec 05 13:02:29 crc kubenswrapper[4784]: I1205 13:02:29.573691 4784 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"cf372dd1dff8e9ccbb780396921dcc2542b196734ec18e5c9adbd311c71fa1a9"} pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 13:02:29 crc kubenswrapper[4784]: I1205 13:02:29.573774 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" containerID="cri-o://cf372dd1dff8e9ccbb780396921dcc2542b196734ec18e5c9adbd311c71fa1a9" gracePeriod=600 Dec 05 13:02:29 crc kubenswrapper[4784]: E1205 13:02:29.704457 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:02:30 crc kubenswrapper[4784]: I1205 13:02:30.575786 4784 generic.go:334] "Generic (PLEG): container finished" podID="be412f31-7a36-4811-8914-be8cdc987d08" containerID="cf372dd1dff8e9ccbb780396921dcc2542b196734ec18e5c9adbd311c71fa1a9" exitCode=0 Dec 05 13:02:30 crc kubenswrapper[4784]: I1205 13:02:30.575877 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerDied","Data":"cf372dd1dff8e9ccbb780396921dcc2542b196734ec18e5c9adbd311c71fa1a9"} Dec 05 13:02:30 crc kubenswrapper[4784]: I1205 13:02:30.576163 4784 scope.go:117] "RemoveContainer" containerID="530d1b87a942e0fe217bf312e1b52cd3f9bc07f40f420aae148e7f7c0b99a93a" Dec 05 13:02:30 crc kubenswrapper[4784]: I1205 13:02:30.576809 4784 scope.go:117] "RemoveContainer" containerID="cf372dd1dff8e9ccbb780396921dcc2542b196734ec18e5c9adbd311c71fa1a9" Dec 05 13:02:30 crc kubenswrapper[4784]: E1205 13:02:30.577090 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:02:44 crc kubenswrapper[4784]: I1205 13:02:43.999576 4784 scope.go:117] "RemoveContainer" containerID="cf372dd1dff8e9ccbb780396921dcc2542b196734ec18e5c9adbd311c71fa1a9" Dec 05 13:02:44 crc kubenswrapper[4784]: E1205 13:02:44.000760 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:02:57 crc kubenswrapper[4784]: I1205 13:02:57.998721 4784 scope.go:117] "RemoveContainer" containerID="cf372dd1dff8e9ccbb780396921dcc2542b196734ec18e5c9adbd311c71fa1a9" Dec 05 13:02:58 crc kubenswrapper[4784]: E1205 13:02:57.999554 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:03:02 crc kubenswrapper[4784]: I1205 13:03:02.987853 4784 generic.go:334] "Generic (PLEG): container finished" podID="8014c4e6-3539-4d7f-95c3-bb37c4a1e08e" containerID="c02ac678a404da6605a91a8f8185f3cc3da818f820416e8d431d521ef077b640" exitCode=0 Dec 05 13:03:02 crc kubenswrapper[4784]: I1205 13:03:02.987986 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhm79" event={"ID":"8014c4e6-3539-4d7f-95c3-bb37c4a1e08e","Type":"ContainerDied","Data":"c02ac678a404da6605a91a8f8185f3cc3da818f820416e8d431d521ef077b640"} Dec 05 13:03:04 crc kubenswrapper[4784]: I1205 13:03:04.415277 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhm79" Dec 05 13:03:04 crc kubenswrapper[4784]: I1205 13:03:04.465152 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8014c4e6-3539-4d7f-95c3-bb37c4a1e08e-ssh-key\") pod \"8014c4e6-3539-4d7f-95c3-bb37c4a1e08e\" (UID: \"8014c4e6-3539-4d7f-95c3-bb37c4a1e08e\") " Dec 05 13:03:04 crc kubenswrapper[4784]: I1205 13:03:04.465219 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8014c4e6-3539-4d7f-95c3-bb37c4a1e08e-ovn-combined-ca-bundle\") pod \"8014c4e6-3539-4d7f-95c3-bb37c4a1e08e\" (UID: \"8014c4e6-3539-4d7f-95c3-bb37c4a1e08e\") " Dec 05 13:03:04 crc kubenswrapper[4784]: I1205 13:03:04.465295 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sgbtt\" (UniqueName: \"kubernetes.io/projected/8014c4e6-3539-4d7f-95c3-bb37c4a1e08e-kube-api-access-sgbtt\") pod \"8014c4e6-3539-4d7f-95c3-bb37c4a1e08e\" (UID: \"8014c4e6-3539-4d7f-95c3-bb37c4a1e08e\") " Dec 05 13:03:04 crc kubenswrapper[4784]: I1205 13:03:04.465347 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8014c4e6-3539-4d7f-95c3-bb37c4a1e08e-inventory\") pod \"8014c4e6-3539-4d7f-95c3-bb37c4a1e08e\" (UID: \"8014c4e6-3539-4d7f-95c3-bb37c4a1e08e\") " Dec 05 13:03:04 crc kubenswrapper[4784]: I1205 13:03:04.465433 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/8014c4e6-3539-4d7f-95c3-bb37c4a1e08e-ovncontroller-config-0\") pod \"8014c4e6-3539-4d7f-95c3-bb37c4a1e08e\" (UID: \"8014c4e6-3539-4d7f-95c3-bb37c4a1e08e\") " Dec 05 13:03:04 crc kubenswrapper[4784]: I1205 13:03:04.474031 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8014c4e6-3539-4d7f-95c3-bb37c4a1e08e-kube-api-access-sgbtt" (OuterVolumeSpecName: "kube-api-access-sgbtt") pod "8014c4e6-3539-4d7f-95c3-bb37c4a1e08e" (UID: "8014c4e6-3539-4d7f-95c3-bb37c4a1e08e"). InnerVolumeSpecName "kube-api-access-sgbtt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:03:04 crc kubenswrapper[4784]: I1205 13:03:04.478591 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8014c4e6-3539-4d7f-95c3-bb37c4a1e08e-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "8014c4e6-3539-4d7f-95c3-bb37c4a1e08e" (UID: "8014c4e6-3539-4d7f-95c3-bb37c4a1e08e"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:03:04 crc kubenswrapper[4784]: I1205 13:03:04.507287 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8014c4e6-3539-4d7f-95c3-bb37c4a1e08e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8014c4e6-3539-4d7f-95c3-bb37c4a1e08e" (UID: "8014c4e6-3539-4d7f-95c3-bb37c4a1e08e"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:03:04 crc kubenswrapper[4784]: I1205 13:03:04.512972 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8014c4e6-3539-4d7f-95c3-bb37c4a1e08e-inventory" (OuterVolumeSpecName: "inventory") pod "8014c4e6-3539-4d7f-95c3-bb37c4a1e08e" (UID: "8014c4e6-3539-4d7f-95c3-bb37c4a1e08e"). 
InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:03:04 crc kubenswrapper[4784]: I1205 13:03:04.515560 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8014c4e6-3539-4d7f-95c3-bb37c4a1e08e-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "8014c4e6-3539-4d7f-95c3-bb37c4a1e08e" (UID: "8014c4e6-3539-4d7f-95c3-bb37c4a1e08e"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 13:03:04 crc kubenswrapper[4784]: I1205 13:03:04.567711 4784 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/8014c4e6-3539-4d7f-95c3-bb37c4a1e08e-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 13:03:04 crc kubenswrapper[4784]: I1205 13:03:04.567989 4784 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8014c4e6-3539-4d7f-95c3-bb37c4a1e08e-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 13:03:04 crc kubenswrapper[4784]: I1205 13:03:04.568003 4784 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8014c4e6-3539-4d7f-95c3-bb37c4a1e08e-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 13:03:04 crc kubenswrapper[4784]: I1205 13:03:04.568018 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sgbtt\" (UniqueName: \"kubernetes.io/projected/8014c4e6-3539-4d7f-95c3-bb37c4a1e08e-kube-api-access-sgbtt\") on node \"crc\" DevicePath \"\"" Dec 05 13:03:04 crc kubenswrapper[4784]: I1205 13:03:04.568031 4784 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8014c4e6-3539-4d7f-95c3-bb37c4a1e08e-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 13:03:05 crc kubenswrapper[4784]: I1205 13:03:05.010024 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhm79" Dec 05 13:03:05 crc kubenswrapper[4784]: I1205 13:03:05.023648 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhm79" event={"ID":"8014c4e6-3539-4d7f-95c3-bb37c4a1e08e","Type":"ContainerDied","Data":"b6e3a865730a23e8dde653769b92ae01ace6ee51bea7cad96aeee2418b415244"} Dec 05 13:03:05 crc kubenswrapper[4784]: I1205 13:03:05.023788 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b6e3a865730a23e8dde653769b92ae01ace6ee51bea7cad96aeee2418b415244" Dec 05 13:03:05 crc kubenswrapper[4784]: I1205 13:03:05.149168 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts"] Dec 05 13:03:05 crc kubenswrapper[4784]: E1205 13:03:05.149736 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8014c4e6-3539-4d7f-95c3-bb37c4a1e08e" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 05 13:03:05 crc kubenswrapper[4784]: I1205 13:03:05.149770 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="8014c4e6-3539-4d7f-95c3-bb37c4a1e08e" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 05 13:03:05 crc kubenswrapper[4784]: I1205 13:03:05.150044 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="8014c4e6-3539-4d7f-95c3-bb37c4a1e08e" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 05 13:03:05 crc kubenswrapper[4784]: I1205 13:03:05.150723 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts" Dec 05 13:03:05 crc kubenswrapper[4784]: I1205 13:03:05.156867 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Dec 05 13:03:05 crc kubenswrapper[4784]: I1205 13:03:05.157138 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 13:03:05 crc kubenswrapper[4784]: I1205 13:03:05.157477 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 13:03:05 crc kubenswrapper[4784]: I1205 13:03:05.157698 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Dec 05 13:03:05 crc kubenswrapper[4784]: I1205 13:03:05.157881 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 13:03:05 crc kubenswrapper[4784]: I1205 13:03:05.158160 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-lwfzg" Dec 05 13:03:05 crc kubenswrapper[4784]: I1205 13:03:05.180580 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/ab97b4b4-1696-43ea-b462-56bcd34dda98-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts\" (UID: \"ab97b4b4-1696-43ea-b462-56bcd34dda98\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts" Dec 05 13:03:05 crc kubenswrapper[4784]: I1205 13:03:05.180752 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-spzk5\" (UniqueName: 
\"kubernetes.io/projected/ab97b4b4-1696-43ea-b462-56bcd34dda98-kube-api-access-spzk5\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts\" (UID: \"ab97b4b4-1696-43ea-b462-56bcd34dda98\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts" Dec 05 13:03:05 crc kubenswrapper[4784]: I1205 13:03:05.180896 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ab97b4b4-1696-43ea-b462-56bcd34dda98-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts\" (UID: \"ab97b4b4-1696-43ea-b462-56bcd34dda98\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts" Dec 05 13:03:05 crc kubenswrapper[4784]: I1205 13:03:05.180929 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab97b4b4-1696-43ea-b462-56bcd34dda98-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts\" (UID: \"ab97b4b4-1696-43ea-b462-56bcd34dda98\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts" Dec 05 13:03:05 crc kubenswrapper[4784]: I1205 13:03:05.182015 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/ab97b4b4-1696-43ea-b462-56bcd34dda98-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts\" (UID: \"ab97b4b4-1696-43ea-b462-56bcd34dda98\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts" Dec 05 13:03:05 crc kubenswrapper[4784]: I1205 13:03:05.182105 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ab97b4b4-1696-43ea-b462-56bcd34dda98-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts\" (UID: \"ab97b4b4-1696-43ea-b462-56bcd34dda98\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts" Dec 05 13:03:05 crc kubenswrapper[4784]: I1205 13:03:05.182435 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts"] Dec 05 13:03:05 crc kubenswrapper[4784]: I1205 13:03:05.283989 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/ab97b4b4-1696-43ea-b462-56bcd34dda98-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts\" (UID: \"ab97b4b4-1696-43ea-b462-56bcd34dda98\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts" Dec 05 13:03:05 crc kubenswrapper[4784]: I1205 13:03:05.284050 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ab97b4b4-1696-43ea-b462-56bcd34dda98-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts\" (UID: \"ab97b4b4-1696-43ea-b462-56bcd34dda98\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts" Dec 05 13:03:05 crc kubenswrapper[4784]: I1205 13:03:05.284121 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: 
\"kubernetes.io/secret/ab97b4b4-1696-43ea-b462-56bcd34dda98-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts\" (UID: \"ab97b4b4-1696-43ea-b462-56bcd34dda98\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts" Dec 05 13:03:05 crc kubenswrapper[4784]: I1205 13:03:05.284251 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-spzk5\" (UniqueName: \"kubernetes.io/projected/ab97b4b4-1696-43ea-b462-56bcd34dda98-kube-api-access-spzk5\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts\" (UID: \"ab97b4b4-1696-43ea-b462-56bcd34dda98\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts" Dec 05 13:03:05 crc kubenswrapper[4784]: I1205 13:03:05.284290 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ab97b4b4-1696-43ea-b462-56bcd34dda98-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts\" (UID: \"ab97b4b4-1696-43ea-b462-56bcd34dda98\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts" Dec 05 13:03:05 crc kubenswrapper[4784]: I1205 13:03:05.284307 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab97b4b4-1696-43ea-b462-56bcd34dda98-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts\" (UID: \"ab97b4b4-1696-43ea-b462-56bcd34dda98\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts" Dec 05 13:03:05 crc kubenswrapper[4784]: I1205 13:03:05.288016 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ab97b4b4-1696-43ea-b462-56bcd34dda98-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts\" (UID: \"ab97b4b4-1696-43ea-b462-56bcd34dda98\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts" Dec 05 13:03:05 crc kubenswrapper[4784]: I1205 13:03:05.288365 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ab97b4b4-1696-43ea-b462-56bcd34dda98-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts\" (UID: \"ab97b4b4-1696-43ea-b462-56bcd34dda98\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts" Dec 05 13:03:05 crc kubenswrapper[4784]: I1205 13:03:05.288844 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/ab97b4b4-1696-43ea-b462-56bcd34dda98-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts\" (UID: \"ab97b4b4-1696-43ea-b462-56bcd34dda98\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts" Dec 05 13:03:05 crc kubenswrapper[4784]: I1205 13:03:05.290665 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/ab97b4b4-1696-43ea-b462-56bcd34dda98-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts\" (UID: \"ab97b4b4-1696-43ea-b462-56bcd34dda98\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts" Dec 05 13:03:05 crc kubenswrapper[4784]: I1205 13:03:05.294891 
4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab97b4b4-1696-43ea-b462-56bcd34dda98-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts\" (UID: \"ab97b4b4-1696-43ea-b462-56bcd34dda98\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts" Dec 05 13:03:05 crc kubenswrapper[4784]: I1205 13:03:05.312049 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-spzk5\" (UniqueName: \"kubernetes.io/projected/ab97b4b4-1696-43ea-b462-56bcd34dda98-kube-api-access-spzk5\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts\" (UID: \"ab97b4b4-1696-43ea-b462-56bcd34dda98\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts" Dec 05 13:03:05 crc kubenswrapper[4784]: I1205 13:03:05.488924 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts" Dec 05 13:03:06 crc kubenswrapper[4784]: I1205 13:03:06.090421 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts"] Dec 05 13:03:06 crc kubenswrapper[4784]: W1205 13:03:06.092990 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podab97b4b4_1696_43ea_b462_56bcd34dda98.slice/crio-9799d6547ec8251041476a80c549aefa11cd193a5d6f1a24a6ad51388e8fbb3c WatchSource:0}: Error finding container 9799d6547ec8251041476a80c549aefa11cd193a5d6f1a24a6ad51388e8fbb3c: Status 404 returned error can't find the container with id 9799d6547ec8251041476a80c549aefa11cd193a5d6f1a24a6ad51388e8fbb3c Dec 05 13:03:07 crc kubenswrapper[4784]: I1205 13:03:07.034096 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts" event={"ID":"ab97b4b4-1696-43ea-b462-56bcd34dda98","Type":"ContainerStarted","Data":"3a19be1b64f6b731a1aa0405139b5c77040f60ecc51b2368ce16fd9e16a938b8"} Dec 05 13:03:07 crc kubenswrapper[4784]: I1205 13:03:07.034841 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts" event={"ID":"ab97b4b4-1696-43ea-b462-56bcd34dda98","Type":"ContainerStarted","Data":"9799d6547ec8251041476a80c549aefa11cd193a5d6f1a24a6ad51388e8fbb3c"} Dec 05 13:03:07 crc kubenswrapper[4784]: I1205 13:03:07.058675 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts" podStartSLOduration=1.552524981 podStartE2EDuration="2.058648224s" podCreationTimestamp="2025-12-05 13:03:05 +0000 UTC" firstStartedPulling="2025-12-05 13:03:06.097058978 +0000 UTC m=+2265.517125843" lastFinishedPulling="2025-12-05 13:03:06.603182251 +0000 UTC m=+2266.023249086" observedRunningTime="2025-12-05 13:03:07.053032458 +0000 UTC m=+2266.473099283" watchObservedRunningTime="2025-12-05 13:03:07.058648224 +0000 UTC m=+2266.478715059" Dec 05 13:03:12 crc kubenswrapper[4784]: I1205 13:03:11.999098 4784 scope.go:117] "RemoveContainer" containerID="cf372dd1dff8e9ccbb780396921dcc2542b196734ec18e5c9adbd311c71fa1a9" Dec 05 13:03:12 crc kubenswrapper[4784]: E1205 13:03:12.000339 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:03:23 crc kubenswrapper[4784]: I1205 13:03:23.998979 4784 scope.go:117] "RemoveContainer" containerID="cf372dd1dff8e9ccbb780396921dcc2542b196734ec18e5c9adbd311c71fa1a9" Dec 05 13:03:24 crc kubenswrapper[4784]: E1205 13:03:23.999923 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:03:34 crc kubenswrapper[4784]: I1205 13:03:34.998702 4784 scope.go:117] "RemoveContainer" containerID="cf372dd1dff8e9ccbb780396921dcc2542b196734ec18e5c9adbd311c71fa1a9" Dec 05 13:03:35 crc kubenswrapper[4784]: E1205 13:03:34.999601 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:03:48 crc kubenswrapper[4784]: I1205 13:03:47.999537 4784 scope.go:117] "RemoveContainer" containerID="cf372dd1dff8e9ccbb780396921dcc2542b196734ec18e5c9adbd311c71fa1a9" Dec 05 13:03:48 crc kubenswrapper[4784]: E1205 13:03:48.000595 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:03:59 crc kubenswrapper[4784]: I1205 13:03:58.999840 4784 scope.go:117] "RemoveContainer" containerID="cf372dd1dff8e9ccbb780396921dcc2542b196734ec18e5c9adbd311c71fa1a9" Dec 05 13:03:59 crc kubenswrapper[4784]: E1205 13:03:59.000475 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:03:59 crc kubenswrapper[4784]: I1205 13:03:59.614699 4784 generic.go:334] "Generic (PLEG): container finished" podID="ab97b4b4-1696-43ea-b462-56bcd34dda98" containerID="3a19be1b64f6b731a1aa0405139b5c77040f60ecc51b2368ce16fd9e16a938b8" exitCode=0 Dec 05 13:03:59 crc kubenswrapper[4784]: I1205 13:03:59.614775 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts" 
event={"ID":"ab97b4b4-1696-43ea-b462-56bcd34dda98","Type":"ContainerDied","Data":"3a19be1b64f6b731a1aa0405139b5c77040f60ecc51b2368ce16fd9e16a938b8"} Dec 05 13:04:01 crc kubenswrapper[4784]: I1205 13:04:01.114789 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts" Dec 05 13:04:01 crc kubenswrapper[4784]: I1205 13:04:01.220432 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab97b4b4-1696-43ea-b462-56bcd34dda98-neutron-metadata-combined-ca-bundle\") pod \"ab97b4b4-1696-43ea-b462-56bcd34dda98\" (UID: \"ab97b4b4-1696-43ea-b462-56bcd34dda98\") " Dec 05 13:04:01 crc kubenswrapper[4784]: I1205 13:04:01.220497 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-spzk5\" (UniqueName: \"kubernetes.io/projected/ab97b4b4-1696-43ea-b462-56bcd34dda98-kube-api-access-spzk5\") pod \"ab97b4b4-1696-43ea-b462-56bcd34dda98\" (UID: \"ab97b4b4-1696-43ea-b462-56bcd34dda98\") " Dec 05 13:04:01 crc kubenswrapper[4784]: I1205 13:04:01.220534 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/ab97b4b4-1696-43ea-b462-56bcd34dda98-nova-metadata-neutron-config-0\") pod \"ab97b4b4-1696-43ea-b462-56bcd34dda98\" (UID: \"ab97b4b4-1696-43ea-b462-56bcd34dda98\") " Dec 05 13:04:01 crc kubenswrapper[4784]: I1205 13:04:01.221514 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ab97b4b4-1696-43ea-b462-56bcd34dda98-ssh-key\") pod \"ab97b4b4-1696-43ea-b462-56bcd34dda98\" (UID: \"ab97b4b4-1696-43ea-b462-56bcd34dda98\") " Dec 05 13:04:01 crc kubenswrapper[4784]: I1205 13:04:01.221567 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/ab97b4b4-1696-43ea-b462-56bcd34dda98-neutron-ovn-metadata-agent-neutron-config-0\") pod \"ab97b4b4-1696-43ea-b462-56bcd34dda98\" (UID: \"ab97b4b4-1696-43ea-b462-56bcd34dda98\") " Dec 05 13:04:01 crc kubenswrapper[4784]: I1205 13:04:01.221750 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ab97b4b4-1696-43ea-b462-56bcd34dda98-inventory\") pod \"ab97b4b4-1696-43ea-b462-56bcd34dda98\" (UID: \"ab97b4b4-1696-43ea-b462-56bcd34dda98\") " Dec 05 13:04:01 crc kubenswrapper[4784]: I1205 13:04:01.229947 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab97b4b4-1696-43ea-b462-56bcd34dda98-kube-api-access-spzk5" (OuterVolumeSpecName: "kube-api-access-spzk5") pod "ab97b4b4-1696-43ea-b462-56bcd34dda98" (UID: "ab97b4b4-1696-43ea-b462-56bcd34dda98"). InnerVolumeSpecName "kube-api-access-spzk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:04:01 crc kubenswrapper[4784]: I1205 13:04:01.241122 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab97b4b4-1696-43ea-b462-56bcd34dda98-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "ab97b4b4-1696-43ea-b462-56bcd34dda98" (UID: "ab97b4b4-1696-43ea-b462-56bcd34dda98"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:04:01 crc kubenswrapper[4784]: I1205 13:04:01.256021 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab97b4b4-1696-43ea-b462-56bcd34dda98-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "ab97b4b4-1696-43ea-b462-56bcd34dda98" (UID: "ab97b4b4-1696-43ea-b462-56bcd34dda98"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:04:01 crc kubenswrapper[4784]: E1205 13:04:01.261652 4784 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ab97b4b4-1696-43ea-b462-56bcd34dda98-inventory podName:ab97b4b4-1696-43ea-b462-56bcd34dda98 nodeName:}" failed. No retries permitted until 2025-12-05 13:04:01.761601693 +0000 UTC m=+2321.181668498 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "inventory" (UniqueName: "kubernetes.io/secret/ab97b4b4-1696-43ea-b462-56bcd34dda98-inventory") pod "ab97b4b4-1696-43ea-b462-56bcd34dda98" (UID: "ab97b4b4-1696-43ea-b462-56bcd34dda98") : error deleting /var/lib/kubelet/pods/ab97b4b4-1696-43ea-b462-56bcd34dda98/volume-subpaths: remove /var/lib/kubelet/pods/ab97b4b4-1696-43ea-b462-56bcd34dda98/volume-subpaths: no such file or directory Dec 05 13:04:01 crc kubenswrapper[4784]: I1205 13:04:01.264983 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab97b4b4-1696-43ea-b462-56bcd34dda98-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ab97b4b4-1696-43ea-b462-56bcd34dda98" (UID: "ab97b4b4-1696-43ea-b462-56bcd34dda98"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:04:01 crc kubenswrapper[4784]: I1205 13:04:01.265400 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab97b4b4-1696-43ea-b462-56bcd34dda98-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "ab97b4b4-1696-43ea-b462-56bcd34dda98" (UID: "ab97b4b4-1696-43ea-b462-56bcd34dda98"). InnerVolumeSpecName "nova-metadata-neutron-config-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:04:01 crc kubenswrapper[4784]: I1205 13:04:01.324549 4784 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab97b4b4-1696-43ea-b462-56bcd34dda98-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 13:04:01 crc kubenswrapper[4784]: I1205 13:04:01.324584 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-spzk5\" (UniqueName: \"kubernetes.io/projected/ab97b4b4-1696-43ea-b462-56bcd34dda98-kube-api-access-spzk5\") on node \"crc\" DevicePath \"\"" Dec 05 13:04:01 crc kubenswrapper[4784]: I1205 13:04:01.324594 4784 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/ab97b4b4-1696-43ea-b462-56bcd34dda98-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 13:04:01 crc kubenswrapper[4784]: I1205 13:04:01.324603 4784 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ab97b4b4-1696-43ea-b462-56bcd34dda98-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 13:04:01 crc kubenswrapper[4784]: I1205 13:04:01.324612 4784 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/ab97b4b4-1696-43ea-b462-56bcd34dda98-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 13:04:01 crc kubenswrapper[4784]: I1205 13:04:01.636849 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts" event={"ID":"ab97b4b4-1696-43ea-b462-56bcd34dda98","Type":"ContainerDied","Data":"9799d6547ec8251041476a80c549aefa11cd193a5d6f1a24a6ad51388e8fbb3c"} Dec 05 13:04:01 crc kubenswrapper[4784]: I1205 13:04:01.636896 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts" Dec 05 13:04:01 crc kubenswrapper[4784]: I1205 13:04:01.636899 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9799d6547ec8251041476a80c549aefa11cd193a5d6f1a24a6ad51388e8fbb3c" Dec 05 13:04:01 crc kubenswrapper[4784]: I1205 13:04:01.746484 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r4w6d"] Dec 05 13:04:01 crc kubenswrapper[4784]: E1205 13:04:01.746919 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab97b4b4-1696-43ea-b462-56bcd34dda98" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 05 13:04:01 crc kubenswrapper[4784]: I1205 13:04:01.746937 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab97b4b4-1696-43ea-b462-56bcd34dda98" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 05 13:04:01 crc kubenswrapper[4784]: I1205 13:04:01.747146 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab97b4b4-1696-43ea-b462-56bcd34dda98" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 05 13:04:01 crc kubenswrapper[4784]: I1205 13:04:01.747852 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r4w6d" Dec 05 13:04:01 crc kubenswrapper[4784]: I1205 13:04:01.751040 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Dec 05 13:04:01 crc kubenswrapper[4784]: I1205 13:04:01.760490 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r4w6d"] Dec 05 13:04:01 crc kubenswrapper[4784]: I1205 13:04:01.831235 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ab97b4b4-1696-43ea-b462-56bcd34dda98-inventory\") pod \"ab97b4b4-1696-43ea-b462-56bcd34dda98\" (UID: \"ab97b4b4-1696-43ea-b462-56bcd34dda98\") " Dec 05 13:04:01 crc kubenswrapper[4784]: I1205 13:04:01.831483 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ba1cfa9d-6665-4a66-a134-28fae26e36a2-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-r4w6d\" (UID: \"ba1cfa9d-6665-4a66-a134-28fae26e36a2\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r4w6d" Dec 05 13:04:01 crc kubenswrapper[4784]: I1205 13:04:01.831748 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ba1cfa9d-6665-4a66-a134-28fae26e36a2-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-r4w6d\" (UID: \"ba1cfa9d-6665-4a66-a134-28fae26e36a2\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r4w6d" Dec 05 13:04:01 crc kubenswrapper[4784]: I1205 13:04:01.831851 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5bpmc\" (UniqueName: \"kubernetes.io/projected/ba1cfa9d-6665-4a66-a134-28fae26e36a2-kube-api-access-5bpmc\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-r4w6d\" (UID: \"ba1cfa9d-6665-4a66-a134-28fae26e36a2\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r4w6d" Dec 05 13:04:01 crc kubenswrapper[4784]: I1205 13:04:01.831955 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/ba1cfa9d-6665-4a66-a134-28fae26e36a2-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-r4w6d\" (UID: \"ba1cfa9d-6665-4a66-a134-28fae26e36a2\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r4w6d" Dec 05 13:04:01 crc kubenswrapper[4784]: I1205 13:04:01.831988 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba1cfa9d-6665-4a66-a134-28fae26e36a2-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-r4w6d\" (UID: \"ba1cfa9d-6665-4a66-a134-28fae26e36a2\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r4w6d" Dec 05 13:04:01 crc kubenswrapper[4784]: I1205 13:04:01.835288 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab97b4b4-1696-43ea-b462-56bcd34dda98-inventory" (OuterVolumeSpecName: "inventory") pod "ab97b4b4-1696-43ea-b462-56bcd34dda98" (UID: "ab97b4b4-1696-43ea-b462-56bcd34dda98"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:04:01 crc kubenswrapper[4784]: I1205 13:04:01.933791 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ba1cfa9d-6665-4a66-a134-28fae26e36a2-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-r4w6d\" (UID: \"ba1cfa9d-6665-4a66-a134-28fae26e36a2\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r4w6d" Dec 05 13:04:01 crc kubenswrapper[4784]: I1205 13:04:01.933872 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5bpmc\" (UniqueName: \"kubernetes.io/projected/ba1cfa9d-6665-4a66-a134-28fae26e36a2-kube-api-access-5bpmc\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-r4w6d\" (UID: \"ba1cfa9d-6665-4a66-a134-28fae26e36a2\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r4w6d" Dec 05 13:04:01 crc kubenswrapper[4784]: I1205 13:04:01.933931 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/ba1cfa9d-6665-4a66-a134-28fae26e36a2-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-r4w6d\" (UID: \"ba1cfa9d-6665-4a66-a134-28fae26e36a2\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r4w6d" Dec 05 13:04:01 crc kubenswrapper[4784]: I1205 13:04:01.933955 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba1cfa9d-6665-4a66-a134-28fae26e36a2-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-r4w6d\" (UID: \"ba1cfa9d-6665-4a66-a134-28fae26e36a2\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r4w6d" Dec 05 13:04:01 crc kubenswrapper[4784]: I1205 13:04:01.934081 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ba1cfa9d-6665-4a66-a134-28fae26e36a2-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-r4w6d\" (UID: \"ba1cfa9d-6665-4a66-a134-28fae26e36a2\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r4w6d" Dec 05 13:04:01 crc kubenswrapper[4784]: I1205 13:04:01.934203 4784 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ab97b4b4-1696-43ea-b462-56bcd34dda98-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 13:04:01 crc kubenswrapper[4784]: I1205 13:04:01.938143 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/ba1cfa9d-6665-4a66-a134-28fae26e36a2-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-r4w6d\" (UID: \"ba1cfa9d-6665-4a66-a134-28fae26e36a2\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r4w6d" Dec 05 13:04:01 crc kubenswrapper[4784]: I1205 13:04:01.938158 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ba1cfa9d-6665-4a66-a134-28fae26e36a2-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-r4w6d\" (UID: \"ba1cfa9d-6665-4a66-a134-28fae26e36a2\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r4w6d" Dec 05 13:04:01 crc kubenswrapper[4784]: I1205 13:04:01.940939 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ba1cfa9d-6665-4a66-a134-28fae26e36a2-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-r4w6d\" (UID: \"ba1cfa9d-6665-4a66-a134-28fae26e36a2\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r4w6d" Dec 05 13:04:01 crc kubenswrapper[4784]: I1205 13:04:01.942418 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ba1cfa9d-6665-4a66-a134-28fae26e36a2-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-r4w6d\" (UID: \"ba1cfa9d-6665-4a66-a134-28fae26e36a2\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r4w6d" Dec 05 13:04:01 crc kubenswrapper[4784]: I1205 13:04:01.950307 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5bpmc\" (UniqueName: \"kubernetes.io/projected/ba1cfa9d-6665-4a66-a134-28fae26e36a2-kube-api-access-5bpmc\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-r4w6d\" (UID: \"ba1cfa9d-6665-4a66-a134-28fae26e36a2\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r4w6d" Dec 05 13:04:02 crc kubenswrapper[4784]: I1205 13:04:02.067563 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r4w6d" Dec 05 13:04:02 crc kubenswrapper[4784]: I1205 13:04:02.649348 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r4w6d"] Dec 05 13:04:02 crc kubenswrapper[4784]: I1205 13:04:02.652171 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r4w6d" event={"ID":"ba1cfa9d-6665-4a66-a134-28fae26e36a2","Type":"ContainerStarted","Data":"1db5dd288f587d1539272efb5aea6b8b922551169fdde0c07a1bbc8ddd53d681"} Dec 05 13:04:03 crc kubenswrapper[4784]: I1205 13:04:03.662270 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r4w6d" event={"ID":"ba1cfa9d-6665-4a66-a134-28fae26e36a2","Type":"ContainerStarted","Data":"ae045b33fdc111b7b44ed8bbe93d291d9af973d5e6ded43e4833d23212b272a8"} Dec 05 13:04:03 crc kubenswrapper[4784]: I1205 13:04:03.689815 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r4w6d" podStartSLOduration=2.232016214 podStartE2EDuration="2.68979763s" podCreationTimestamp="2025-12-05 13:04:01 +0000 UTC" firstStartedPulling="2025-12-05 13:04:02.641783205 +0000 UTC m=+2322.061850020" lastFinishedPulling="2025-12-05 13:04:03.099564621 +0000 UTC m=+2322.519631436" observedRunningTime="2025-12-05 13:04:03.684948288 +0000 UTC m=+2323.105015123" watchObservedRunningTime="2025-12-05 13:04:03.68979763 +0000 UTC m=+2323.109864445" Dec 05 13:04:13 crc kubenswrapper[4784]: I1205 13:04:13.998883 4784 scope.go:117] "RemoveContainer" containerID="cf372dd1dff8e9ccbb780396921dcc2542b196734ec18e5c9adbd311c71fa1a9" Dec 05 13:04:14 crc kubenswrapper[4784]: E1205 13:04:13.999712 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:04:28 crc kubenswrapper[4784]: I1205 13:04:28.003216 4784 
scope.go:117] "RemoveContainer" containerID="cf372dd1dff8e9ccbb780396921dcc2542b196734ec18e5c9adbd311c71fa1a9" Dec 05 13:04:28 crc kubenswrapper[4784]: E1205 13:04:28.004466 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:04:38 crc kubenswrapper[4784]: I1205 13:04:38.998923 4784 scope.go:117] "RemoveContainer" containerID="cf372dd1dff8e9ccbb780396921dcc2542b196734ec18e5c9adbd311c71fa1a9" Dec 05 13:04:39 crc kubenswrapper[4784]: E1205 13:04:38.999773 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:04:51 crc kubenswrapper[4784]: I1205 13:04:51.998277 4784 scope.go:117] "RemoveContainer" containerID="cf372dd1dff8e9ccbb780396921dcc2542b196734ec18e5c9adbd311c71fa1a9" Dec 05 13:04:52 crc kubenswrapper[4784]: E1205 13:04:52.000446 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:05:03 crc kubenswrapper[4784]: I1205 13:05:03.002510 4784 scope.go:117] "RemoveContainer" containerID="cf372dd1dff8e9ccbb780396921dcc2542b196734ec18e5c9adbd311c71fa1a9" Dec 05 13:05:03 crc kubenswrapper[4784]: E1205 13:05:03.003409 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:05:17 crc kubenswrapper[4784]: I1205 13:05:17.000014 4784 scope.go:117] "RemoveContainer" containerID="cf372dd1dff8e9ccbb780396921dcc2542b196734ec18e5c9adbd311c71fa1a9" Dec 05 13:05:17 crc kubenswrapper[4784]: E1205 13:05:17.001039 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:05:27 crc kubenswrapper[4784]: I1205 13:05:27.998626 4784 scope.go:117] "RemoveContainer" containerID="cf372dd1dff8e9ccbb780396921dcc2542b196734ec18e5c9adbd311c71fa1a9" Dec 05 13:05:28 crc kubenswrapper[4784]: E1205 13:05:27.999393 4784 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:05:38 crc kubenswrapper[4784]: I1205 13:05:38.999158 4784 scope.go:117] "RemoveContainer" containerID="cf372dd1dff8e9ccbb780396921dcc2542b196734ec18e5c9adbd311c71fa1a9" Dec 05 13:05:39 crc kubenswrapper[4784]: E1205 13:05:39.000128 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:05:45 crc kubenswrapper[4784]: I1205 13:05:45.231478 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-zblws"] Dec 05 13:05:45 crc kubenswrapper[4784]: I1205 13:05:45.234075 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zblws" Dec 05 13:05:45 crc kubenswrapper[4784]: I1205 13:05:45.243216 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zblws"] Dec 05 13:05:45 crc kubenswrapper[4784]: I1205 13:05:45.387776 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fnntm\" (UniqueName: \"kubernetes.io/projected/6b5744ea-9a56-4e20-8a6d-5ca65c878aa1-kube-api-access-fnntm\") pod \"redhat-operators-zblws\" (UID: \"6b5744ea-9a56-4e20-8a6d-5ca65c878aa1\") " pod="openshift-marketplace/redhat-operators-zblws" Dec 05 13:05:45 crc kubenswrapper[4784]: I1205 13:05:45.387855 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b5744ea-9a56-4e20-8a6d-5ca65c878aa1-utilities\") pod \"redhat-operators-zblws\" (UID: \"6b5744ea-9a56-4e20-8a6d-5ca65c878aa1\") " pod="openshift-marketplace/redhat-operators-zblws" Dec 05 13:05:45 crc kubenswrapper[4784]: I1205 13:05:45.387940 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b5744ea-9a56-4e20-8a6d-5ca65c878aa1-catalog-content\") pod \"redhat-operators-zblws\" (UID: \"6b5744ea-9a56-4e20-8a6d-5ca65c878aa1\") " pod="openshift-marketplace/redhat-operators-zblws" Dec 05 13:05:45 crc kubenswrapper[4784]: I1205 13:05:45.490067 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fnntm\" (UniqueName: \"kubernetes.io/projected/6b5744ea-9a56-4e20-8a6d-5ca65c878aa1-kube-api-access-fnntm\") pod \"redhat-operators-zblws\" (UID: \"6b5744ea-9a56-4e20-8a6d-5ca65c878aa1\") " pod="openshift-marketplace/redhat-operators-zblws" Dec 05 13:05:45 crc kubenswrapper[4784]: I1205 13:05:45.490140 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b5744ea-9a56-4e20-8a6d-5ca65c878aa1-utilities\") pod \"redhat-operators-zblws\" (UID: 
\"6b5744ea-9a56-4e20-8a6d-5ca65c878aa1\") " pod="openshift-marketplace/redhat-operators-zblws" Dec 05 13:05:45 crc kubenswrapper[4784]: I1205 13:05:45.490297 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b5744ea-9a56-4e20-8a6d-5ca65c878aa1-catalog-content\") pod \"redhat-operators-zblws\" (UID: \"6b5744ea-9a56-4e20-8a6d-5ca65c878aa1\") " pod="openshift-marketplace/redhat-operators-zblws" Dec 05 13:05:45 crc kubenswrapper[4784]: I1205 13:05:45.490682 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b5744ea-9a56-4e20-8a6d-5ca65c878aa1-utilities\") pod \"redhat-operators-zblws\" (UID: \"6b5744ea-9a56-4e20-8a6d-5ca65c878aa1\") " pod="openshift-marketplace/redhat-operators-zblws" Dec 05 13:05:45 crc kubenswrapper[4784]: I1205 13:05:45.490798 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b5744ea-9a56-4e20-8a6d-5ca65c878aa1-catalog-content\") pod \"redhat-operators-zblws\" (UID: \"6b5744ea-9a56-4e20-8a6d-5ca65c878aa1\") " pod="openshift-marketplace/redhat-operators-zblws" Dec 05 13:05:45 crc kubenswrapper[4784]: I1205 13:05:45.524632 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fnntm\" (UniqueName: \"kubernetes.io/projected/6b5744ea-9a56-4e20-8a6d-5ca65c878aa1-kube-api-access-fnntm\") pod \"redhat-operators-zblws\" (UID: \"6b5744ea-9a56-4e20-8a6d-5ca65c878aa1\") " pod="openshift-marketplace/redhat-operators-zblws" Dec 05 13:05:45 crc kubenswrapper[4784]: I1205 13:05:45.560419 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zblws" Dec 05 13:05:46 crc kubenswrapper[4784]: I1205 13:05:46.162448 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zblws"] Dec 05 13:05:46 crc kubenswrapper[4784]: E1205 13:05:46.592253 4784 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6b5744ea_9a56_4e20_8a6d_5ca65c878aa1.slice/crio-conmon-3a4d320b84a2a8ffc229ba046315e14e2ab5a274c99bd15b51b193c5d15a8ec1.scope\": RecentStats: unable to find data in memory cache]" Dec 05 13:05:46 crc kubenswrapper[4784]: I1205 13:05:46.750487 4784 generic.go:334] "Generic (PLEG): container finished" podID="6b5744ea-9a56-4e20-8a6d-5ca65c878aa1" containerID="3a4d320b84a2a8ffc229ba046315e14e2ab5a274c99bd15b51b193c5d15a8ec1" exitCode=0 Dec 05 13:05:46 crc kubenswrapper[4784]: I1205 13:05:46.750605 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zblws" event={"ID":"6b5744ea-9a56-4e20-8a6d-5ca65c878aa1","Type":"ContainerDied","Data":"3a4d320b84a2a8ffc229ba046315e14e2ab5a274c99bd15b51b193c5d15a8ec1"} Dec 05 13:05:46 crc kubenswrapper[4784]: I1205 13:05:46.750857 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zblws" event={"ID":"6b5744ea-9a56-4e20-8a6d-5ca65c878aa1","Type":"ContainerStarted","Data":"fe6607ab0681d559fb41a348280580a8cc2e26ac562092342f5092c3dd41269f"} Dec 05 13:05:46 crc kubenswrapper[4784]: I1205 13:05:46.752582 4784 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 13:05:47 crc kubenswrapper[4784]: I1205 13:05:47.762944 4784 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zblws" event={"ID":"6b5744ea-9a56-4e20-8a6d-5ca65c878aa1","Type":"ContainerStarted","Data":"d022a5530bd1f1fb990bafab66a4c80af5831a84db35b89f7a4ee398bed1a7ba"} Dec 05 13:05:49 crc kubenswrapper[4784]: I1205 13:05:49.998931 4784 scope.go:117] "RemoveContainer" containerID="cf372dd1dff8e9ccbb780396921dcc2542b196734ec18e5c9adbd311c71fa1a9" Dec 05 13:05:50 crc kubenswrapper[4784]: E1205 13:05:49.999742 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:05:50 crc kubenswrapper[4784]: I1205 13:05:50.796226 4784 generic.go:334] "Generic (PLEG): container finished" podID="6b5744ea-9a56-4e20-8a6d-5ca65c878aa1" containerID="d022a5530bd1f1fb990bafab66a4c80af5831a84db35b89f7a4ee398bed1a7ba" exitCode=0 Dec 05 13:05:50 crc kubenswrapper[4784]: I1205 13:05:50.796238 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zblws" event={"ID":"6b5744ea-9a56-4e20-8a6d-5ca65c878aa1","Type":"ContainerDied","Data":"d022a5530bd1f1fb990bafab66a4c80af5831a84db35b89f7a4ee398bed1a7ba"} Dec 05 13:05:52 crc kubenswrapper[4784]: I1205 13:05:52.816549 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zblws" event={"ID":"6b5744ea-9a56-4e20-8a6d-5ca65c878aa1","Type":"ContainerStarted","Data":"78abc3d69096fd422746dbb39d386579ec3c8a0800bfa4e070f8d3ce75c0638a"} Dec 05 13:05:52 crc kubenswrapper[4784]: I1205 13:05:52.850345 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-zblws" podStartSLOduration=2.954586365 podStartE2EDuration="7.85032401s" podCreationTimestamp="2025-12-05 13:05:45 +0000 UTC" firstStartedPulling="2025-12-05 13:05:46.752343747 +0000 UTC m=+2426.172410562" lastFinishedPulling="2025-12-05 13:05:51.648081392 +0000 UTC m=+2431.068148207" observedRunningTime="2025-12-05 13:05:52.844077465 +0000 UTC m=+2432.264144280" watchObservedRunningTime="2025-12-05 13:05:52.85032401 +0000 UTC m=+2432.270390825" Dec 05 13:05:55 crc kubenswrapper[4784]: I1205 13:05:55.561052 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-zblws" Dec 05 13:05:55 crc kubenswrapper[4784]: I1205 13:05:55.561379 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-zblws" Dec 05 13:05:56 crc kubenswrapper[4784]: I1205 13:05:56.613278 4784 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-zblws" podUID="6b5744ea-9a56-4e20-8a6d-5ca65c878aa1" containerName="registry-server" probeResult="failure" output=< Dec 05 13:05:56 crc kubenswrapper[4784]: timeout: failed to connect service ":50051" within 1s Dec 05 13:05:56 crc kubenswrapper[4784]: > Dec 05 13:06:01 crc kubenswrapper[4784]: I1205 13:06:01.017407 4784 scope.go:117] "RemoveContainer" containerID="cf372dd1dff8e9ccbb780396921dcc2542b196734ec18e5c9adbd311c71fa1a9" Dec 05 13:06:01 crc kubenswrapper[4784]: E1205 13:06:01.018564 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:06:05 crc kubenswrapper[4784]: I1205 13:06:05.660527 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-zblws" Dec 05 13:06:05 crc kubenswrapper[4784]: I1205 13:06:05.725858 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-zblws" Dec 05 13:06:08 crc kubenswrapper[4784]: I1205 13:06:08.935395 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zblws"] Dec 05 13:06:08 crc kubenswrapper[4784]: I1205 13:06:08.936236 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-zblws" podUID="6b5744ea-9a56-4e20-8a6d-5ca65c878aa1" containerName="registry-server" containerID="cri-o://78abc3d69096fd422746dbb39d386579ec3c8a0800bfa4e070f8d3ce75c0638a" gracePeriod=2 Dec 05 13:06:09 crc kubenswrapper[4784]: I1205 13:06:09.406122 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zblws" Dec 05 13:06:09 crc kubenswrapper[4784]: I1205 13:06:09.522660 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fnntm\" (UniqueName: \"kubernetes.io/projected/6b5744ea-9a56-4e20-8a6d-5ca65c878aa1-kube-api-access-fnntm\") pod \"6b5744ea-9a56-4e20-8a6d-5ca65c878aa1\" (UID: \"6b5744ea-9a56-4e20-8a6d-5ca65c878aa1\") " Dec 05 13:06:09 crc kubenswrapper[4784]: I1205 13:06:09.526200 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b5744ea-9a56-4e20-8a6d-5ca65c878aa1-catalog-content\") pod \"6b5744ea-9a56-4e20-8a6d-5ca65c878aa1\" (UID: \"6b5744ea-9a56-4e20-8a6d-5ca65c878aa1\") " Dec 05 13:06:09 crc kubenswrapper[4784]: I1205 13:06:09.527102 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b5744ea-9a56-4e20-8a6d-5ca65c878aa1-utilities\") pod \"6b5744ea-9a56-4e20-8a6d-5ca65c878aa1\" (UID: \"6b5744ea-9a56-4e20-8a6d-5ca65c878aa1\") " Dec 05 13:06:09 crc kubenswrapper[4784]: I1205 13:06:09.527976 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b5744ea-9a56-4e20-8a6d-5ca65c878aa1-utilities" (OuterVolumeSpecName: "utilities") pod "6b5744ea-9a56-4e20-8a6d-5ca65c878aa1" (UID: "6b5744ea-9a56-4e20-8a6d-5ca65c878aa1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:06:09 crc kubenswrapper[4784]: I1205 13:06:09.528408 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b5744ea-9a56-4e20-8a6d-5ca65c878aa1-kube-api-access-fnntm" (OuterVolumeSpecName: "kube-api-access-fnntm") pod "6b5744ea-9a56-4e20-8a6d-5ca65c878aa1" (UID: "6b5744ea-9a56-4e20-8a6d-5ca65c878aa1"). InnerVolumeSpecName "kube-api-access-fnntm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:06:09 crc kubenswrapper[4784]: I1205 13:06:09.528930 4784 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b5744ea-9a56-4e20-8a6d-5ca65c878aa1-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 13:06:09 crc kubenswrapper[4784]: I1205 13:06:09.529009 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fnntm\" (UniqueName: \"kubernetes.io/projected/6b5744ea-9a56-4e20-8a6d-5ca65c878aa1-kube-api-access-fnntm\") on node \"crc\" DevicePath \"\"" Dec 05 13:06:09 crc kubenswrapper[4784]: I1205 13:06:09.632343 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b5744ea-9a56-4e20-8a6d-5ca65c878aa1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6b5744ea-9a56-4e20-8a6d-5ca65c878aa1" (UID: "6b5744ea-9a56-4e20-8a6d-5ca65c878aa1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:06:09 crc kubenswrapper[4784]: I1205 13:06:09.733690 4784 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b5744ea-9a56-4e20-8a6d-5ca65c878aa1-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 13:06:10 crc kubenswrapper[4784]: I1205 13:06:10.014049 4784 generic.go:334] "Generic (PLEG): container finished" podID="6b5744ea-9a56-4e20-8a6d-5ca65c878aa1" containerID="78abc3d69096fd422746dbb39d386579ec3c8a0800bfa4e070f8d3ce75c0638a" exitCode=0 Dec 05 13:06:10 crc kubenswrapper[4784]: I1205 13:06:10.015050 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zblws" event={"ID":"6b5744ea-9a56-4e20-8a6d-5ca65c878aa1","Type":"ContainerDied","Data":"78abc3d69096fd422746dbb39d386579ec3c8a0800bfa4e070f8d3ce75c0638a"} Dec 05 13:06:10 crc kubenswrapper[4784]: I1205 13:06:10.015103 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zblws" event={"ID":"6b5744ea-9a56-4e20-8a6d-5ca65c878aa1","Type":"ContainerDied","Data":"fe6607ab0681d559fb41a348280580a8cc2e26ac562092342f5092c3dd41269f"} Dec 05 13:06:10 crc kubenswrapper[4784]: I1205 13:06:10.015486 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-zblws" Dec 05 13:06:10 crc kubenswrapper[4784]: I1205 13:06:10.015616 4784 scope.go:117] "RemoveContainer" containerID="78abc3d69096fd422746dbb39d386579ec3c8a0800bfa4e070f8d3ce75c0638a" Dec 05 13:06:10 crc kubenswrapper[4784]: I1205 13:06:10.047144 4784 scope.go:117] "RemoveContainer" containerID="d022a5530bd1f1fb990bafab66a4c80af5831a84db35b89f7a4ee398bed1a7ba" Dec 05 13:06:10 crc kubenswrapper[4784]: I1205 13:06:10.065327 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zblws"] Dec 05 13:06:10 crc kubenswrapper[4784]: I1205 13:06:10.074230 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-zblws"] Dec 05 13:06:10 crc kubenswrapper[4784]: I1205 13:06:10.077635 4784 scope.go:117] "RemoveContainer" containerID="3a4d320b84a2a8ffc229ba046315e14e2ab5a274c99bd15b51b193c5d15a8ec1" Dec 05 13:06:10 crc kubenswrapper[4784]: I1205 13:06:10.136536 4784 scope.go:117] "RemoveContainer" containerID="78abc3d69096fd422746dbb39d386579ec3c8a0800bfa4e070f8d3ce75c0638a" Dec 05 13:06:10 crc kubenswrapper[4784]: E1205 13:06:10.136931 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"78abc3d69096fd422746dbb39d386579ec3c8a0800bfa4e070f8d3ce75c0638a\": container with ID starting with 78abc3d69096fd422746dbb39d386579ec3c8a0800bfa4e070f8d3ce75c0638a not found: ID does not exist" containerID="78abc3d69096fd422746dbb39d386579ec3c8a0800bfa4e070f8d3ce75c0638a" Dec 05 13:06:10 crc kubenswrapper[4784]: I1205 13:06:10.136980 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78abc3d69096fd422746dbb39d386579ec3c8a0800bfa4e070f8d3ce75c0638a"} err="failed to get container status \"78abc3d69096fd422746dbb39d386579ec3c8a0800bfa4e070f8d3ce75c0638a\": rpc error: code = NotFound desc = could not find container \"78abc3d69096fd422746dbb39d386579ec3c8a0800bfa4e070f8d3ce75c0638a\": container with ID starting with 78abc3d69096fd422746dbb39d386579ec3c8a0800bfa4e070f8d3ce75c0638a not found: ID does not exist" Dec 05 13:06:10 crc kubenswrapper[4784]: I1205 13:06:10.137012 4784 scope.go:117] "RemoveContainer" containerID="d022a5530bd1f1fb990bafab66a4c80af5831a84db35b89f7a4ee398bed1a7ba" Dec 05 13:06:10 crc kubenswrapper[4784]: E1205 13:06:10.137326 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d022a5530bd1f1fb990bafab66a4c80af5831a84db35b89f7a4ee398bed1a7ba\": container with ID starting with d022a5530bd1f1fb990bafab66a4c80af5831a84db35b89f7a4ee398bed1a7ba not found: ID does not exist" containerID="d022a5530bd1f1fb990bafab66a4c80af5831a84db35b89f7a4ee398bed1a7ba" Dec 05 13:06:10 crc kubenswrapper[4784]: I1205 13:06:10.137359 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d022a5530bd1f1fb990bafab66a4c80af5831a84db35b89f7a4ee398bed1a7ba"} err="failed to get container status \"d022a5530bd1f1fb990bafab66a4c80af5831a84db35b89f7a4ee398bed1a7ba\": rpc error: code = NotFound desc = could not find container \"d022a5530bd1f1fb990bafab66a4c80af5831a84db35b89f7a4ee398bed1a7ba\": container with ID starting with d022a5530bd1f1fb990bafab66a4c80af5831a84db35b89f7a4ee398bed1a7ba not found: ID does not exist" Dec 05 13:06:10 crc kubenswrapper[4784]: I1205 13:06:10.137385 4784 scope.go:117] "RemoveContainer" 
containerID="3a4d320b84a2a8ffc229ba046315e14e2ab5a274c99bd15b51b193c5d15a8ec1" Dec 05 13:06:10 crc kubenswrapper[4784]: E1205 13:06:10.137643 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3a4d320b84a2a8ffc229ba046315e14e2ab5a274c99bd15b51b193c5d15a8ec1\": container with ID starting with 3a4d320b84a2a8ffc229ba046315e14e2ab5a274c99bd15b51b193c5d15a8ec1 not found: ID does not exist" containerID="3a4d320b84a2a8ffc229ba046315e14e2ab5a274c99bd15b51b193c5d15a8ec1" Dec 05 13:06:10 crc kubenswrapper[4784]: I1205 13:06:10.137677 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a4d320b84a2a8ffc229ba046315e14e2ab5a274c99bd15b51b193c5d15a8ec1"} err="failed to get container status \"3a4d320b84a2a8ffc229ba046315e14e2ab5a274c99bd15b51b193c5d15a8ec1\": rpc error: code = NotFound desc = could not find container \"3a4d320b84a2a8ffc229ba046315e14e2ab5a274c99bd15b51b193c5d15a8ec1\": container with ID starting with 3a4d320b84a2a8ffc229ba046315e14e2ab5a274c99bd15b51b193c5d15a8ec1 not found: ID does not exist" Dec 05 13:06:11 crc kubenswrapper[4784]: I1205 13:06:11.010143 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b5744ea-9a56-4e20-8a6d-5ca65c878aa1" path="/var/lib/kubelet/pods/6b5744ea-9a56-4e20-8a6d-5ca65c878aa1/volumes" Dec 05 13:06:14 crc kubenswrapper[4784]: I1205 13:06:13.999938 4784 scope.go:117] "RemoveContainer" containerID="cf372dd1dff8e9ccbb780396921dcc2542b196734ec18e5c9adbd311c71fa1a9" Dec 05 13:06:14 crc kubenswrapper[4784]: E1205 13:06:14.001524 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:06:28 crc kubenswrapper[4784]: I1205 13:06:28.999392 4784 scope.go:117] "RemoveContainer" containerID="cf372dd1dff8e9ccbb780396921dcc2542b196734ec18e5c9adbd311c71fa1a9" Dec 05 13:06:29 crc kubenswrapper[4784]: E1205 13:06:29.000271 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:06:43 crc kubenswrapper[4784]: I1205 13:06:42.999941 4784 scope.go:117] "RemoveContainer" containerID="cf372dd1dff8e9ccbb780396921dcc2542b196734ec18e5c9adbd311c71fa1a9" Dec 05 13:06:43 crc kubenswrapper[4784]: E1205 13:06:43.000862 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:06:56 crc kubenswrapper[4784]: I1205 13:06:55.999797 4784 scope.go:117] "RemoveContainer" 
containerID="cf372dd1dff8e9ccbb780396921dcc2542b196734ec18e5c9adbd311c71fa1a9" Dec 05 13:06:56 crc kubenswrapper[4784]: E1205 13:06:56.000800 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:07:10 crc kubenswrapper[4784]: I1205 13:07:09.999550 4784 scope.go:117] "RemoveContainer" containerID="cf372dd1dff8e9ccbb780396921dcc2542b196734ec18e5c9adbd311c71fa1a9" Dec 05 13:07:10 crc kubenswrapper[4784]: E1205 13:07:10.000379 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:07:25 crc kubenswrapper[4784]: I1205 13:07:25.000291 4784 scope.go:117] "RemoveContainer" containerID="cf372dd1dff8e9ccbb780396921dcc2542b196734ec18e5c9adbd311c71fa1a9" Dec 05 13:07:25 crc kubenswrapper[4784]: E1205 13:07:25.001353 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:07:37 crc kubenswrapper[4784]: I1205 13:07:37.000399 4784 scope.go:117] "RemoveContainer" containerID="cf372dd1dff8e9ccbb780396921dcc2542b196734ec18e5c9adbd311c71fa1a9" Dec 05 13:07:38 crc kubenswrapper[4784]: I1205 13:07:38.274259 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerStarted","Data":"20782b65b809ce339f6751afbc8faeb14f9a8c58aa3bf376517c44a4241af538"} Dec 05 13:08:36 crc kubenswrapper[4784]: I1205 13:08:36.913974 4784 generic.go:334] "Generic (PLEG): container finished" podID="ba1cfa9d-6665-4a66-a134-28fae26e36a2" containerID="ae045b33fdc111b7b44ed8bbe93d291d9af973d5e6ded43e4833d23212b272a8" exitCode=0 Dec 05 13:08:36 crc kubenswrapper[4784]: I1205 13:08:36.914180 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r4w6d" event={"ID":"ba1cfa9d-6665-4a66-a134-28fae26e36a2","Type":"ContainerDied","Data":"ae045b33fdc111b7b44ed8bbe93d291d9af973d5e6ded43e4833d23212b272a8"} Dec 05 13:08:38 crc kubenswrapper[4784]: I1205 13:08:38.381177 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r4w6d" Dec 05 13:08:38 crc kubenswrapper[4784]: I1205 13:08:38.482040 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba1cfa9d-6665-4a66-a134-28fae26e36a2-libvirt-combined-ca-bundle\") pod \"ba1cfa9d-6665-4a66-a134-28fae26e36a2\" (UID: \"ba1cfa9d-6665-4a66-a134-28fae26e36a2\") " Dec 05 13:08:38 crc kubenswrapper[4784]: I1205 13:08:38.482179 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/ba1cfa9d-6665-4a66-a134-28fae26e36a2-libvirt-secret-0\") pod \"ba1cfa9d-6665-4a66-a134-28fae26e36a2\" (UID: \"ba1cfa9d-6665-4a66-a134-28fae26e36a2\") " Dec 05 13:08:38 crc kubenswrapper[4784]: I1205 13:08:38.482227 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5bpmc\" (UniqueName: \"kubernetes.io/projected/ba1cfa9d-6665-4a66-a134-28fae26e36a2-kube-api-access-5bpmc\") pod \"ba1cfa9d-6665-4a66-a134-28fae26e36a2\" (UID: \"ba1cfa9d-6665-4a66-a134-28fae26e36a2\") " Dec 05 13:08:38 crc kubenswrapper[4784]: I1205 13:08:38.482264 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ba1cfa9d-6665-4a66-a134-28fae26e36a2-inventory\") pod \"ba1cfa9d-6665-4a66-a134-28fae26e36a2\" (UID: \"ba1cfa9d-6665-4a66-a134-28fae26e36a2\") " Dec 05 13:08:38 crc kubenswrapper[4784]: I1205 13:08:38.482310 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ba1cfa9d-6665-4a66-a134-28fae26e36a2-ssh-key\") pod \"ba1cfa9d-6665-4a66-a134-28fae26e36a2\" (UID: \"ba1cfa9d-6665-4a66-a134-28fae26e36a2\") " Dec 05 13:08:38 crc kubenswrapper[4784]: I1205 13:08:38.491956 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba1cfa9d-6665-4a66-a134-28fae26e36a2-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "ba1cfa9d-6665-4a66-a134-28fae26e36a2" (UID: "ba1cfa9d-6665-4a66-a134-28fae26e36a2"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:08:38 crc kubenswrapper[4784]: I1205 13:08:38.492543 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba1cfa9d-6665-4a66-a134-28fae26e36a2-kube-api-access-5bpmc" (OuterVolumeSpecName: "kube-api-access-5bpmc") pod "ba1cfa9d-6665-4a66-a134-28fae26e36a2" (UID: "ba1cfa9d-6665-4a66-a134-28fae26e36a2"). InnerVolumeSpecName "kube-api-access-5bpmc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:08:38 crc kubenswrapper[4784]: I1205 13:08:38.508641 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba1cfa9d-6665-4a66-a134-28fae26e36a2-inventory" (OuterVolumeSpecName: "inventory") pod "ba1cfa9d-6665-4a66-a134-28fae26e36a2" (UID: "ba1cfa9d-6665-4a66-a134-28fae26e36a2"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:08:38 crc kubenswrapper[4784]: I1205 13:08:38.517858 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba1cfa9d-6665-4a66-a134-28fae26e36a2-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ba1cfa9d-6665-4a66-a134-28fae26e36a2" (UID: "ba1cfa9d-6665-4a66-a134-28fae26e36a2"). 
InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:08:38 crc kubenswrapper[4784]: I1205 13:08:38.518404 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba1cfa9d-6665-4a66-a134-28fae26e36a2-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "ba1cfa9d-6665-4a66-a134-28fae26e36a2" (UID: "ba1cfa9d-6665-4a66-a134-28fae26e36a2"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:08:38 crc kubenswrapper[4784]: I1205 13:08:38.585881 4784 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba1cfa9d-6665-4a66-a134-28fae26e36a2-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 13:08:38 crc kubenswrapper[4784]: I1205 13:08:38.585951 4784 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/ba1cfa9d-6665-4a66-a134-28fae26e36a2-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Dec 05 13:08:38 crc kubenswrapper[4784]: I1205 13:08:38.585978 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5bpmc\" (UniqueName: \"kubernetes.io/projected/ba1cfa9d-6665-4a66-a134-28fae26e36a2-kube-api-access-5bpmc\") on node \"crc\" DevicePath \"\"" Dec 05 13:08:38 crc kubenswrapper[4784]: I1205 13:08:38.586008 4784 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ba1cfa9d-6665-4a66-a134-28fae26e36a2-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 13:08:38 crc kubenswrapper[4784]: I1205 13:08:38.586037 4784 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ba1cfa9d-6665-4a66-a134-28fae26e36a2-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 13:08:38 crc kubenswrapper[4784]: I1205 13:08:38.943517 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r4w6d" event={"ID":"ba1cfa9d-6665-4a66-a134-28fae26e36a2","Type":"ContainerDied","Data":"1db5dd288f587d1539272efb5aea6b8b922551169fdde0c07a1bbc8ddd53d681"} Dec 05 13:08:38 crc kubenswrapper[4784]: I1205 13:08:38.943627 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1db5dd288f587d1539272efb5aea6b8b922551169fdde0c07a1bbc8ddd53d681" Dec 05 13:08:38 crc kubenswrapper[4784]: I1205 13:08:38.943654 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r4w6d" Dec 05 13:08:39 crc kubenswrapper[4784]: I1205 13:08:39.075760 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-m6kfz"] Dec 05 13:08:39 crc kubenswrapper[4784]: E1205 13:08:39.076284 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b5744ea-9a56-4e20-8a6d-5ca65c878aa1" containerName="registry-server" Dec 05 13:08:39 crc kubenswrapper[4784]: I1205 13:08:39.076305 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b5744ea-9a56-4e20-8a6d-5ca65c878aa1" containerName="registry-server" Dec 05 13:08:39 crc kubenswrapper[4784]: E1205 13:08:39.076331 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba1cfa9d-6665-4a66-a134-28fae26e36a2" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 05 13:08:39 crc kubenswrapper[4784]: I1205 13:08:39.076341 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba1cfa9d-6665-4a66-a134-28fae26e36a2" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 05 13:08:39 crc kubenswrapper[4784]: E1205 13:08:39.076353 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b5744ea-9a56-4e20-8a6d-5ca65c878aa1" containerName="extract-utilities" Dec 05 13:08:39 crc kubenswrapper[4784]: I1205 13:08:39.076362 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b5744ea-9a56-4e20-8a6d-5ca65c878aa1" containerName="extract-utilities" Dec 05 13:08:39 crc kubenswrapper[4784]: E1205 13:08:39.076384 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b5744ea-9a56-4e20-8a6d-5ca65c878aa1" containerName="extract-content" Dec 05 13:08:39 crc kubenswrapper[4784]: I1205 13:08:39.076392 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b5744ea-9a56-4e20-8a6d-5ca65c878aa1" containerName="extract-content" Dec 05 13:08:39 crc kubenswrapper[4784]: I1205 13:08:39.076646 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b5744ea-9a56-4e20-8a6d-5ca65c878aa1" containerName="registry-server" Dec 05 13:08:39 crc kubenswrapper[4784]: I1205 13:08:39.076677 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba1cfa9d-6665-4a66-a134-28fae26e36a2" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 05 13:08:39 crc kubenswrapper[4784]: I1205 13:08:39.077634 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m6kfz" Dec 05 13:08:39 crc kubenswrapper[4784]: I1205 13:08:39.085908 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-lwfzg" Dec 05 13:08:39 crc kubenswrapper[4784]: I1205 13:08:39.086228 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 13:08:39 crc kubenswrapper[4784]: I1205 13:08:39.086421 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 13:08:39 crc kubenswrapper[4784]: I1205 13:08:39.086511 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Dec 05 13:08:39 crc kubenswrapper[4784]: I1205 13:08:39.086588 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Dec 05 13:08:39 crc kubenswrapper[4784]: I1205 13:08:39.086672 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 13:08:39 crc kubenswrapper[4784]: I1205 13:08:39.088813 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-m6kfz"] Dec 05 13:08:39 crc kubenswrapper[4784]: I1205 13:08:39.091398 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Dec 05 13:08:39 crc kubenswrapper[4784]: I1205 13:08:39.198826 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/73ebab61-4062-476d-84bc-1013b097d5ac-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m6kfz\" (UID: \"73ebab61-4062-476d-84bc-1013b097d5ac\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m6kfz" Dec 05 13:08:39 crc kubenswrapper[4784]: I1205 13:08:39.198895 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/73ebab61-4062-476d-84bc-1013b097d5ac-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m6kfz\" (UID: \"73ebab61-4062-476d-84bc-1013b097d5ac\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m6kfz" Dec 05 13:08:39 crc kubenswrapper[4784]: I1205 13:08:39.199084 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/73ebab61-4062-476d-84bc-1013b097d5ac-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m6kfz\" (UID: \"73ebab61-4062-476d-84bc-1013b097d5ac\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m6kfz" Dec 05 13:08:39 crc kubenswrapper[4784]: I1205 13:08:39.199157 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73ebab61-4062-476d-84bc-1013b097d5ac-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m6kfz\" (UID: \"73ebab61-4062-476d-84bc-1013b097d5ac\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m6kfz" Dec 05 13:08:39 crc kubenswrapper[4784]: I1205 13:08:39.199394 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c2dlx\" (UniqueName: 
\"kubernetes.io/projected/73ebab61-4062-476d-84bc-1013b097d5ac-kube-api-access-c2dlx\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m6kfz\" (UID: \"73ebab61-4062-476d-84bc-1013b097d5ac\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m6kfz" Dec 05 13:08:39 crc kubenswrapper[4784]: I1205 13:08:39.199486 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/73ebab61-4062-476d-84bc-1013b097d5ac-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m6kfz\" (UID: \"73ebab61-4062-476d-84bc-1013b097d5ac\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m6kfz" Dec 05 13:08:39 crc kubenswrapper[4784]: I1205 13:08:39.199565 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/73ebab61-4062-476d-84bc-1013b097d5ac-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m6kfz\" (UID: \"73ebab61-4062-476d-84bc-1013b097d5ac\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m6kfz" Dec 05 13:08:39 crc kubenswrapper[4784]: I1205 13:08:39.199612 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/73ebab61-4062-476d-84bc-1013b097d5ac-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m6kfz\" (UID: \"73ebab61-4062-476d-84bc-1013b097d5ac\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m6kfz" Dec 05 13:08:39 crc kubenswrapper[4784]: I1205 13:08:39.199758 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/73ebab61-4062-476d-84bc-1013b097d5ac-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m6kfz\" (UID: \"73ebab61-4062-476d-84bc-1013b097d5ac\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m6kfz" Dec 05 13:08:39 crc kubenswrapper[4784]: I1205 13:08:39.301394 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/73ebab61-4062-476d-84bc-1013b097d5ac-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m6kfz\" (UID: \"73ebab61-4062-476d-84bc-1013b097d5ac\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m6kfz" Dec 05 13:08:39 crc kubenswrapper[4784]: I1205 13:08:39.301518 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/73ebab61-4062-476d-84bc-1013b097d5ac-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m6kfz\" (UID: \"73ebab61-4062-476d-84bc-1013b097d5ac\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m6kfz" Dec 05 13:08:39 crc kubenswrapper[4784]: I1205 13:08:39.301582 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/73ebab61-4062-476d-84bc-1013b097d5ac-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m6kfz\" (UID: \"73ebab61-4062-476d-84bc-1013b097d5ac\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m6kfz" Dec 05 13:08:39 crc kubenswrapper[4784]: I1205 13:08:39.301624 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" 
(UniqueName: \"kubernetes.io/configmap/73ebab61-4062-476d-84bc-1013b097d5ac-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m6kfz\" (UID: \"73ebab61-4062-476d-84bc-1013b097d5ac\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m6kfz" Dec 05 13:08:39 crc kubenswrapper[4784]: I1205 13:08:39.301648 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73ebab61-4062-476d-84bc-1013b097d5ac-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m6kfz\" (UID: \"73ebab61-4062-476d-84bc-1013b097d5ac\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m6kfz" Dec 05 13:08:39 crc kubenswrapper[4784]: I1205 13:08:39.301684 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c2dlx\" (UniqueName: \"kubernetes.io/projected/73ebab61-4062-476d-84bc-1013b097d5ac-kube-api-access-c2dlx\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m6kfz\" (UID: \"73ebab61-4062-476d-84bc-1013b097d5ac\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m6kfz" Dec 05 13:08:39 crc kubenswrapper[4784]: I1205 13:08:39.301715 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/73ebab61-4062-476d-84bc-1013b097d5ac-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m6kfz\" (UID: \"73ebab61-4062-476d-84bc-1013b097d5ac\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m6kfz" Dec 05 13:08:39 crc kubenswrapper[4784]: I1205 13:08:39.301751 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/73ebab61-4062-476d-84bc-1013b097d5ac-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m6kfz\" (UID: \"73ebab61-4062-476d-84bc-1013b097d5ac\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m6kfz" Dec 05 13:08:39 crc kubenswrapper[4784]: I1205 13:08:39.301927 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/73ebab61-4062-476d-84bc-1013b097d5ac-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m6kfz\" (UID: \"73ebab61-4062-476d-84bc-1013b097d5ac\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m6kfz" Dec 05 13:08:39 crc kubenswrapper[4784]: I1205 13:08:39.302848 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/73ebab61-4062-476d-84bc-1013b097d5ac-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m6kfz\" (UID: \"73ebab61-4062-476d-84bc-1013b097d5ac\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m6kfz" Dec 05 13:08:39 crc kubenswrapper[4784]: I1205 13:08:39.307569 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/73ebab61-4062-476d-84bc-1013b097d5ac-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m6kfz\" (UID: \"73ebab61-4062-476d-84bc-1013b097d5ac\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m6kfz" Dec 05 13:08:39 crc kubenswrapper[4784]: I1205 13:08:39.307720 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: 
\"kubernetes.io/secret/73ebab61-4062-476d-84bc-1013b097d5ac-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m6kfz\" (UID: \"73ebab61-4062-476d-84bc-1013b097d5ac\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m6kfz" Dec 05 13:08:39 crc kubenswrapper[4784]: I1205 13:08:39.307813 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/73ebab61-4062-476d-84bc-1013b097d5ac-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m6kfz\" (UID: \"73ebab61-4062-476d-84bc-1013b097d5ac\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m6kfz" Dec 05 13:08:39 crc kubenswrapper[4784]: I1205 13:08:39.311804 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73ebab61-4062-476d-84bc-1013b097d5ac-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m6kfz\" (UID: \"73ebab61-4062-476d-84bc-1013b097d5ac\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m6kfz" Dec 05 13:08:39 crc kubenswrapper[4784]: I1205 13:08:39.312653 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/73ebab61-4062-476d-84bc-1013b097d5ac-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m6kfz\" (UID: \"73ebab61-4062-476d-84bc-1013b097d5ac\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m6kfz" Dec 05 13:08:39 crc kubenswrapper[4784]: I1205 13:08:39.313124 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/73ebab61-4062-476d-84bc-1013b097d5ac-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m6kfz\" (UID: \"73ebab61-4062-476d-84bc-1013b097d5ac\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m6kfz" Dec 05 13:08:39 crc kubenswrapper[4784]: I1205 13:08:39.313508 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/73ebab61-4062-476d-84bc-1013b097d5ac-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m6kfz\" (UID: \"73ebab61-4062-476d-84bc-1013b097d5ac\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m6kfz" Dec 05 13:08:39 crc kubenswrapper[4784]: I1205 13:08:39.319846 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c2dlx\" (UniqueName: \"kubernetes.io/projected/73ebab61-4062-476d-84bc-1013b097d5ac-kube-api-access-c2dlx\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m6kfz\" (UID: \"73ebab61-4062-476d-84bc-1013b097d5ac\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m6kfz" Dec 05 13:08:39 crc kubenswrapper[4784]: I1205 13:08:39.414404 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m6kfz" Dec 05 13:08:39 crc kubenswrapper[4784]: I1205 13:08:39.994676 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-m6kfz"] Dec 05 13:08:40 crc kubenswrapper[4784]: I1205 13:08:40.978103 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m6kfz" event={"ID":"73ebab61-4062-476d-84bc-1013b097d5ac","Type":"ContainerStarted","Data":"e77ab5e307ead1004cafb1a29973ea15fe0bea3f0bfca3e3581fc48000996df7"} Dec 05 13:08:41 crc kubenswrapper[4784]: I1205 13:08:41.989661 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m6kfz" event={"ID":"73ebab61-4062-476d-84bc-1013b097d5ac","Type":"ContainerStarted","Data":"672d9cf7030585091a4164e338d34d614e927e555d2cf2874d2ad9dd0269a654"} Dec 05 13:08:42 crc kubenswrapper[4784]: I1205 13:08:42.030964 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m6kfz" podStartSLOduration=2.5302203629999998 podStartE2EDuration="3.030939984s" podCreationTimestamp="2025-12-05 13:08:39 +0000 UTC" firstStartedPulling="2025-12-05 13:08:40.005281625 +0000 UTC m=+2599.425348440" lastFinishedPulling="2025-12-05 13:08:40.506001246 +0000 UTC m=+2599.926068061" observedRunningTime="2025-12-05 13:08:42.02127333 +0000 UTC m=+2601.441340145" watchObservedRunningTime="2025-12-05 13:08:42.030939984 +0000 UTC m=+2601.451006839" Dec 05 13:09:50 crc kubenswrapper[4784]: I1205 13:09:50.734323 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-67mj7"] Dec 05 13:09:50 crc kubenswrapper[4784]: I1205 13:09:50.738450 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-67mj7" Dec 05 13:09:50 crc kubenswrapper[4784]: I1205 13:09:50.756313 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-67mj7"] Dec 05 13:09:50 crc kubenswrapper[4784]: I1205 13:09:50.816808 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/886381d3-e137-4243-80e5-2b3bf8a435f4-catalog-content\") pod \"redhat-marketplace-67mj7\" (UID: \"886381d3-e137-4243-80e5-2b3bf8a435f4\") " pod="openshift-marketplace/redhat-marketplace-67mj7" Dec 05 13:09:50 crc kubenswrapper[4784]: I1205 13:09:50.816910 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-txnz8\" (UniqueName: \"kubernetes.io/projected/886381d3-e137-4243-80e5-2b3bf8a435f4-kube-api-access-txnz8\") pod \"redhat-marketplace-67mj7\" (UID: \"886381d3-e137-4243-80e5-2b3bf8a435f4\") " pod="openshift-marketplace/redhat-marketplace-67mj7" Dec 05 13:09:50 crc kubenswrapper[4784]: I1205 13:09:50.816937 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/886381d3-e137-4243-80e5-2b3bf8a435f4-utilities\") pod \"redhat-marketplace-67mj7\" (UID: \"886381d3-e137-4243-80e5-2b3bf8a435f4\") " pod="openshift-marketplace/redhat-marketplace-67mj7" Dec 05 13:09:50 crc kubenswrapper[4784]: I1205 13:09:50.918683 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/886381d3-e137-4243-80e5-2b3bf8a435f4-catalog-content\") pod \"redhat-marketplace-67mj7\" (UID: \"886381d3-e137-4243-80e5-2b3bf8a435f4\") " pod="openshift-marketplace/redhat-marketplace-67mj7" Dec 05 13:09:50 crc kubenswrapper[4784]: I1205 13:09:50.918797 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-txnz8\" (UniqueName: \"kubernetes.io/projected/886381d3-e137-4243-80e5-2b3bf8a435f4-kube-api-access-txnz8\") pod \"redhat-marketplace-67mj7\" (UID: \"886381d3-e137-4243-80e5-2b3bf8a435f4\") " pod="openshift-marketplace/redhat-marketplace-67mj7" Dec 05 13:09:50 crc kubenswrapper[4784]: I1205 13:09:50.918832 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/886381d3-e137-4243-80e5-2b3bf8a435f4-utilities\") pod \"redhat-marketplace-67mj7\" (UID: \"886381d3-e137-4243-80e5-2b3bf8a435f4\") " pod="openshift-marketplace/redhat-marketplace-67mj7" Dec 05 13:09:50 crc kubenswrapper[4784]: I1205 13:09:50.919232 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/886381d3-e137-4243-80e5-2b3bf8a435f4-catalog-content\") pod \"redhat-marketplace-67mj7\" (UID: \"886381d3-e137-4243-80e5-2b3bf8a435f4\") " pod="openshift-marketplace/redhat-marketplace-67mj7" Dec 05 13:09:50 crc kubenswrapper[4784]: I1205 13:09:50.919407 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/886381d3-e137-4243-80e5-2b3bf8a435f4-utilities\") pod \"redhat-marketplace-67mj7\" (UID: \"886381d3-e137-4243-80e5-2b3bf8a435f4\") " pod="openshift-marketplace/redhat-marketplace-67mj7" Dec 05 13:09:50 crc kubenswrapper[4784]: I1205 13:09:50.942441 4784 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-txnz8\" (UniqueName: \"kubernetes.io/projected/886381d3-e137-4243-80e5-2b3bf8a435f4-kube-api-access-txnz8\") pod \"redhat-marketplace-67mj7\" (UID: \"886381d3-e137-4243-80e5-2b3bf8a435f4\") " pod="openshift-marketplace/redhat-marketplace-67mj7" Dec 05 13:09:51 crc kubenswrapper[4784]: I1205 13:09:51.061644 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-67mj7" Dec 05 13:09:51 crc kubenswrapper[4784]: I1205 13:09:51.540927 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-67mj7"] Dec 05 13:09:51 crc kubenswrapper[4784]: I1205 13:09:51.780681 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-67mj7" event={"ID":"886381d3-e137-4243-80e5-2b3bf8a435f4","Type":"ContainerStarted","Data":"babcd4ca07c99d425280c1e3f325c3ef9f57b4d14b02c97e5908f4ef9280d83d"} Dec 05 13:09:51 crc kubenswrapper[4784]: I1205 13:09:51.780734 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-67mj7" event={"ID":"886381d3-e137-4243-80e5-2b3bf8a435f4","Type":"ContainerStarted","Data":"804a1f5a97e38d6ab8c3c742d0e849fff891cf0e75eadf49c64c40ec2840dd2a"} Dec 05 13:09:52 crc kubenswrapper[4784]: I1205 13:09:52.798389 4784 generic.go:334] "Generic (PLEG): container finished" podID="886381d3-e137-4243-80e5-2b3bf8a435f4" containerID="babcd4ca07c99d425280c1e3f325c3ef9f57b4d14b02c97e5908f4ef9280d83d" exitCode=0 Dec 05 13:09:52 crc kubenswrapper[4784]: I1205 13:09:52.798926 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-67mj7" event={"ID":"886381d3-e137-4243-80e5-2b3bf8a435f4","Type":"ContainerDied","Data":"babcd4ca07c99d425280c1e3f325c3ef9f57b4d14b02c97e5908f4ef9280d83d"} Dec 05 13:09:54 crc kubenswrapper[4784]: I1205 13:09:54.823908 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-67mj7" event={"ID":"886381d3-e137-4243-80e5-2b3bf8a435f4","Type":"ContainerStarted","Data":"c0c6e422ba1d61237e8a593dbabd36b6de8c82cd0848da4abacac1f6f3767974"} Dec 05 13:09:55 crc kubenswrapper[4784]: I1205 13:09:55.845949 4784 generic.go:334] "Generic (PLEG): container finished" podID="886381d3-e137-4243-80e5-2b3bf8a435f4" containerID="c0c6e422ba1d61237e8a593dbabd36b6de8c82cd0848da4abacac1f6f3767974" exitCode=0 Dec 05 13:09:55 crc kubenswrapper[4784]: I1205 13:09:55.846115 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-67mj7" event={"ID":"886381d3-e137-4243-80e5-2b3bf8a435f4","Type":"ContainerDied","Data":"c0c6e422ba1d61237e8a593dbabd36b6de8c82cd0848da4abacac1f6f3767974"} Dec 05 13:09:56 crc kubenswrapper[4784]: I1205 13:09:56.865104 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-67mj7" event={"ID":"886381d3-e137-4243-80e5-2b3bf8a435f4","Type":"ContainerStarted","Data":"3b7317cd3f59eaa1a3c22d1e9275ef320522b6371a9a7129b65d042995992bef"} Dec 05 13:09:56 crc kubenswrapper[4784]: I1205 13:09:56.888378 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-67mj7" podStartSLOduration=3.404514196 podStartE2EDuration="6.888354538s" podCreationTimestamp="2025-12-05 13:09:50 +0000 UTC" firstStartedPulling="2025-12-05 13:09:52.803164865 +0000 UTC m=+2672.223231680" lastFinishedPulling="2025-12-05 13:09:56.287005197 +0000 UTC 
m=+2675.707072022" observedRunningTime="2025-12-05 13:09:56.88746413 +0000 UTC m=+2676.307530985" watchObservedRunningTime="2025-12-05 13:09:56.888354538 +0000 UTC m=+2676.308421363" Dec 05 13:09:59 crc kubenswrapper[4784]: I1205 13:09:59.572845 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 13:09:59 crc kubenswrapper[4784]: I1205 13:09:59.573307 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 13:10:01 crc kubenswrapper[4784]: I1205 13:10:01.072293 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-67mj7" Dec 05 13:10:01 crc kubenswrapper[4784]: I1205 13:10:01.072626 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-67mj7" Dec 05 13:10:01 crc kubenswrapper[4784]: I1205 13:10:01.118373 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-67mj7" Dec 05 13:10:01 crc kubenswrapper[4784]: I1205 13:10:01.977157 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-67mj7" Dec 05 13:10:02 crc kubenswrapper[4784]: I1205 13:10:02.031809 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-67mj7"] Dec 05 13:10:03 crc kubenswrapper[4784]: I1205 13:10:03.938172 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-67mj7" podUID="886381d3-e137-4243-80e5-2b3bf8a435f4" containerName="registry-server" containerID="cri-o://3b7317cd3f59eaa1a3c22d1e9275ef320522b6371a9a7129b65d042995992bef" gracePeriod=2 Dec 05 13:10:04 crc kubenswrapper[4784]: I1205 13:10:04.398638 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-67mj7" Dec 05 13:10:04 crc kubenswrapper[4784]: I1205 13:10:04.531326 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/886381d3-e137-4243-80e5-2b3bf8a435f4-utilities\") pod \"886381d3-e137-4243-80e5-2b3bf8a435f4\" (UID: \"886381d3-e137-4243-80e5-2b3bf8a435f4\") " Dec 05 13:10:04 crc kubenswrapper[4784]: I1205 13:10:04.531451 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/886381d3-e137-4243-80e5-2b3bf8a435f4-catalog-content\") pod \"886381d3-e137-4243-80e5-2b3bf8a435f4\" (UID: \"886381d3-e137-4243-80e5-2b3bf8a435f4\") " Dec 05 13:10:04 crc kubenswrapper[4784]: I1205 13:10:04.531497 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-txnz8\" (UniqueName: \"kubernetes.io/projected/886381d3-e137-4243-80e5-2b3bf8a435f4-kube-api-access-txnz8\") pod \"886381d3-e137-4243-80e5-2b3bf8a435f4\" (UID: \"886381d3-e137-4243-80e5-2b3bf8a435f4\") " Dec 05 13:10:04 crc kubenswrapper[4784]: I1205 13:10:04.533144 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/886381d3-e137-4243-80e5-2b3bf8a435f4-utilities" (OuterVolumeSpecName: "utilities") pod "886381d3-e137-4243-80e5-2b3bf8a435f4" (UID: "886381d3-e137-4243-80e5-2b3bf8a435f4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:10:04 crc kubenswrapper[4784]: I1205 13:10:04.537354 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/886381d3-e137-4243-80e5-2b3bf8a435f4-kube-api-access-txnz8" (OuterVolumeSpecName: "kube-api-access-txnz8") pod "886381d3-e137-4243-80e5-2b3bf8a435f4" (UID: "886381d3-e137-4243-80e5-2b3bf8a435f4"). InnerVolumeSpecName "kube-api-access-txnz8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:10:04 crc kubenswrapper[4784]: I1205 13:10:04.555961 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/886381d3-e137-4243-80e5-2b3bf8a435f4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "886381d3-e137-4243-80e5-2b3bf8a435f4" (UID: "886381d3-e137-4243-80e5-2b3bf8a435f4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:10:04 crc kubenswrapper[4784]: I1205 13:10:04.635027 4784 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/886381d3-e137-4243-80e5-2b3bf8a435f4-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 13:10:04 crc kubenswrapper[4784]: I1205 13:10:04.635088 4784 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/886381d3-e137-4243-80e5-2b3bf8a435f4-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 13:10:04 crc kubenswrapper[4784]: I1205 13:10:04.635112 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-txnz8\" (UniqueName: \"kubernetes.io/projected/886381d3-e137-4243-80e5-2b3bf8a435f4-kube-api-access-txnz8\") on node \"crc\" DevicePath \"\"" Dec 05 13:10:04 crc kubenswrapper[4784]: I1205 13:10:04.786306 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-f6kzd"] Dec 05 13:10:04 crc kubenswrapper[4784]: E1205 13:10:04.790003 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="886381d3-e137-4243-80e5-2b3bf8a435f4" containerName="extract-content" Dec 05 13:10:04 crc kubenswrapper[4784]: I1205 13:10:04.790134 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="886381d3-e137-4243-80e5-2b3bf8a435f4" containerName="extract-content" Dec 05 13:10:04 crc kubenswrapper[4784]: E1205 13:10:04.790293 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="886381d3-e137-4243-80e5-2b3bf8a435f4" containerName="registry-server" Dec 05 13:10:04 crc kubenswrapper[4784]: I1205 13:10:04.790434 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="886381d3-e137-4243-80e5-2b3bf8a435f4" containerName="registry-server" Dec 05 13:10:04 crc kubenswrapper[4784]: E1205 13:10:04.790585 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="886381d3-e137-4243-80e5-2b3bf8a435f4" containerName="extract-utilities" Dec 05 13:10:04 crc kubenswrapper[4784]: I1205 13:10:04.790676 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="886381d3-e137-4243-80e5-2b3bf8a435f4" containerName="extract-utilities" Dec 05 13:10:04 crc kubenswrapper[4784]: I1205 13:10:04.791450 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="886381d3-e137-4243-80e5-2b3bf8a435f4" containerName="registry-server" Dec 05 13:10:04 crc kubenswrapper[4784]: I1205 13:10:04.795422 4784 util.go:30] "No sandbox for pod can be found. 
Dec 05 13:10:04 crc kubenswrapper[4784]: I1205 13:10:04.808800 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-f6kzd"]
Dec 05 13:10:04 crc kubenswrapper[4784]: I1205 13:10:04.941044 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cc34a4e-d0ad-40c2-9398-df6b7a88e717-utilities\") pod \"community-operators-f6kzd\" (UID: \"8cc34a4e-d0ad-40c2-9398-df6b7a88e717\") " pod="openshift-marketplace/community-operators-f6kzd"
Dec 05 13:10:04 crc kubenswrapper[4784]: I1205 13:10:04.941139 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-59bk4\" (UniqueName: \"kubernetes.io/projected/8cc34a4e-d0ad-40c2-9398-df6b7a88e717-kube-api-access-59bk4\") pod \"community-operators-f6kzd\" (UID: \"8cc34a4e-d0ad-40c2-9398-df6b7a88e717\") " pod="openshift-marketplace/community-operators-f6kzd"
Dec 05 13:10:04 crc kubenswrapper[4784]: I1205 13:10:04.941357 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cc34a4e-d0ad-40c2-9398-df6b7a88e717-catalog-content\") pod \"community-operators-f6kzd\" (UID: \"8cc34a4e-d0ad-40c2-9398-df6b7a88e717\") " pod="openshift-marketplace/community-operators-f6kzd"
Dec 05 13:10:04 crc kubenswrapper[4784]: I1205 13:10:04.949487 4784 generic.go:334] "Generic (PLEG): container finished" podID="886381d3-e137-4243-80e5-2b3bf8a435f4" containerID="3b7317cd3f59eaa1a3c22d1e9275ef320522b6371a9a7129b65d042995992bef" exitCode=0
Dec 05 13:10:04 crc kubenswrapper[4784]: I1205 13:10:04.949529 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-67mj7" event={"ID":"886381d3-e137-4243-80e5-2b3bf8a435f4","Type":"ContainerDied","Data":"3b7317cd3f59eaa1a3c22d1e9275ef320522b6371a9a7129b65d042995992bef"}
Dec 05 13:10:04 crc kubenswrapper[4784]: I1205 13:10:04.949557 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-67mj7" event={"ID":"886381d3-e137-4243-80e5-2b3bf8a435f4","Type":"ContainerDied","Data":"804a1f5a97e38d6ab8c3c742d0e849fff891cf0e75eadf49c64c40ec2840dd2a"}
Dec 05 13:10:04 crc kubenswrapper[4784]: I1205 13:10:04.949575 4784 scope.go:117] "RemoveContainer" containerID="3b7317cd3f59eaa1a3c22d1e9275ef320522b6371a9a7129b65d042995992bef"
Dec 05 13:10:04 crc kubenswrapper[4784]: I1205 13:10:04.949642 4784 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-67mj7" Dec 05 13:10:04 crc kubenswrapper[4784]: I1205 13:10:04.981122 4784 scope.go:117] "RemoveContainer" containerID="c0c6e422ba1d61237e8a593dbabd36b6de8c82cd0848da4abacac1f6f3767974" Dec 05 13:10:04 crc kubenswrapper[4784]: I1205 13:10:04.984810 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-67mj7"] Dec 05 13:10:04 crc kubenswrapper[4784]: I1205 13:10:04.996069 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-67mj7"] Dec 05 13:10:05 crc kubenswrapper[4784]: I1205 13:10:05.010978 4784 scope.go:117] "RemoveContainer" containerID="babcd4ca07c99d425280c1e3f325c3ef9f57b4d14b02c97e5908f4ef9280d83d" Dec 05 13:10:05 crc kubenswrapper[4784]: I1205 13:10:05.020022 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="886381d3-e137-4243-80e5-2b3bf8a435f4" path="/var/lib/kubelet/pods/886381d3-e137-4243-80e5-2b3bf8a435f4/volumes" Dec 05 13:10:05 crc kubenswrapper[4784]: I1205 13:10:05.044006 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cc34a4e-d0ad-40c2-9398-df6b7a88e717-catalog-content\") pod \"community-operators-f6kzd\" (UID: \"8cc34a4e-d0ad-40c2-9398-df6b7a88e717\") " pod="openshift-marketplace/community-operators-f6kzd" Dec 05 13:10:05 crc kubenswrapper[4784]: I1205 13:10:05.044066 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cc34a4e-d0ad-40c2-9398-df6b7a88e717-utilities\") pod \"community-operators-f6kzd\" (UID: \"8cc34a4e-d0ad-40c2-9398-df6b7a88e717\") " pod="openshift-marketplace/community-operators-f6kzd" Dec 05 13:10:05 crc kubenswrapper[4784]: I1205 13:10:05.044143 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-59bk4\" (UniqueName: \"kubernetes.io/projected/8cc34a4e-d0ad-40c2-9398-df6b7a88e717-kube-api-access-59bk4\") pod \"community-operators-f6kzd\" (UID: \"8cc34a4e-d0ad-40c2-9398-df6b7a88e717\") " pod="openshift-marketplace/community-operators-f6kzd" Dec 05 13:10:05 crc kubenswrapper[4784]: I1205 13:10:05.044837 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cc34a4e-d0ad-40c2-9398-df6b7a88e717-catalog-content\") pod \"community-operators-f6kzd\" (UID: \"8cc34a4e-d0ad-40c2-9398-df6b7a88e717\") " pod="openshift-marketplace/community-operators-f6kzd" Dec 05 13:10:05 crc kubenswrapper[4784]: I1205 13:10:05.044849 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cc34a4e-d0ad-40c2-9398-df6b7a88e717-utilities\") pod \"community-operators-f6kzd\" (UID: \"8cc34a4e-d0ad-40c2-9398-df6b7a88e717\") " pod="openshift-marketplace/community-operators-f6kzd" Dec 05 13:10:05 crc kubenswrapper[4784]: I1205 13:10:05.046585 4784 scope.go:117] "RemoveContainer" containerID="3b7317cd3f59eaa1a3c22d1e9275ef320522b6371a9a7129b65d042995992bef" Dec 05 13:10:05 crc kubenswrapper[4784]: E1205 13:10:05.046934 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b7317cd3f59eaa1a3c22d1e9275ef320522b6371a9a7129b65d042995992bef\": container with ID starting with 3b7317cd3f59eaa1a3c22d1e9275ef320522b6371a9a7129b65d042995992bef not found: ID does not exist" 
containerID="3b7317cd3f59eaa1a3c22d1e9275ef320522b6371a9a7129b65d042995992bef" Dec 05 13:10:05 crc kubenswrapper[4784]: I1205 13:10:05.046967 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b7317cd3f59eaa1a3c22d1e9275ef320522b6371a9a7129b65d042995992bef"} err="failed to get container status \"3b7317cd3f59eaa1a3c22d1e9275ef320522b6371a9a7129b65d042995992bef\": rpc error: code = NotFound desc = could not find container \"3b7317cd3f59eaa1a3c22d1e9275ef320522b6371a9a7129b65d042995992bef\": container with ID starting with 3b7317cd3f59eaa1a3c22d1e9275ef320522b6371a9a7129b65d042995992bef not found: ID does not exist" Dec 05 13:10:05 crc kubenswrapper[4784]: I1205 13:10:05.046995 4784 scope.go:117] "RemoveContainer" containerID="c0c6e422ba1d61237e8a593dbabd36b6de8c82cd0848da4abacac1f6f3767974" Dec 05 13:10:05 crc kubenswrapper[4784]: E1205 13:10:05.047281 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c0c6e422ba1d61237e8a593dbabd36b6de8c82cd0848da4abacac1f6f3767974\": container with ID starting with c0c6e422ba1d61237e8a593dbabd36b6de8c82cd0848da4abacac1f6f3767974 not found: ID does not exist" containerID="c0c6e422ba1d61237e8a593dbabd36b6de8c82cd0848da4abacac1f6f3767974" Dec 05 13:10:05 crc kubenswrapper[4784]: I1205 13:10:05.047302 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c0c6e422ba1d61237e8a593dbabd36b6de8c82cd0848da4abacac1f6f3767974"} err="failed to get container status \"c0c6e422ba1d61237e8a593dbabd36b6de8c82cd0848da4abacac1f6f3767974\": rpc error: code = NotFound desc = could not find container \"c0c6e422ba1d61237e8a593dbabd36b6de8c82cd0848da4abacac1f6f3767974\": container with ID starting with c0c6e422ba1d61237e8a593dbabd36b6de8c82cd0848da4abacac1f6f3767974 not found: ID does not exist" Dec 05 13:10:05 crc kubenswrapper[4784]: I1205 13:10:05.047314 4784 scope.go:117] "RemoveContainer" containerID="babcd4ca07c99d425280c1e3f325c3ef9f57b4d14b02c97e5908f4ef9280d83d" Dec 05 13:10:05 crc kubenswrapper[4784]: E1205 13:10:05.047641 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"babcd4ca07c99d425280c1e3f325c3ef9f57b4d14b02c97e5908f4ef9280d83d\": container with ID starting with babcd4ca07c99d425280c1e3f325c3ef9f57b4d14b02c97e5908f4ef9280d83d not found: ID does not exist" containerID="babcd4ca07c99d425280c1e3f325c3ef9f57b4d14b02c97e5908f4ef9280d83d" Dec 05 13:10:05 crc kubenswrapper[4784]: I1205 13:10:05.047664 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"babcd4ca07c99d425280c1e3f325c3ef9f57b4d14b02c97e5908f4ef9280d83d"} err="failed to get container status \"babcd4ca07c99d425280c1e3f325c3ef9f57b4d14b02c97e5908f4ef9280d83d\": rpc error: code = NotFound desc = could not find container \"babcd4ca07c99d425280c1e3f325c3ef9f57b4d14b02c97e5908f4ef9280d83d\": container with ID starting with babcd4ca07c99d425280c1e3f325c3ef9f57b4d14b02c97e5908f4ef9280d83d not found: ID does not exist" Dec 05 13:10:05 crc kubenswrapper[4784]: I1205 13:10:05.062283 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-59bk4\" (UniqueName: \"kubernetes.io/projected/8cc34a4e-d0ad-40c2-9398-df6b7a88e717-kube-api-access-59bk4\") pod \"community-operators-f6kzd\" (UID: \"8cc34a4e-d0ad-40c2-9398-df6b7a88e717\") " pod="openshift-marketplace/community-operators-f6kzd" Dec 05 
Dec 05 13:10:05 crc kubenswrapper[4784]: I1205 13:10:05.126111 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-f6kzd"
Dec 05 13:10:05 crc kubenswrapper[4784]: I1205 13:10:05.659019 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-f6kzd"]
Dec 05 13:10:05 crc kubenswrapper[4784]: I1205 13:10:05.961810 4784 generic.go:334] "Generic (PLEG): container finished" podID="8cc34a4e-d0ad-40c2-9398-df6b7a88e717" containerID="c1a4c6780c2079dd7ca08065974f3d67aa35615aa7b9547545596b4506afec68" exitCode=0
Dec 05 13:10:05 crc kubenswrapper[4784]: I1205 13:10:05.961889 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f6kzd" event={"ID":"8cc34a4e-d0ad-40c2-9398-df6b7a88e717","Type":"ContainerDied","Data":"c1a4c6780c2079dd7ca08065974f3d67aa35615aa7b9547545596b4506afec68"}
Dec 05 13:10:05 crc kubenswrapper[4784]: I1205 13:10:05.962180 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f6kzd" event={"ID":"8cc34a4e-d0ad-40c2-9398-df6b7a88e717","Type":"ContainerStarted","Data":"d0b2635b08f216a192c087fb27e25d097adf48bcd733b1f290835c39072a41aa"}
Dec 05 13:10:06 crc kubenswrapper[4784]: I1205 13:10:06.974819 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f6kzd" event={"ID":"8cc34a4e-d0ad-40c2-9398-df6b7a88e717","Type":"ContainerStarted","Data":"72f972e37c05d22b705896f305f01263d9d9fd3e0a059bb5faf675acee3240d7"}
Dec 05 13:10:07 crc kubenswrapper[4784]: I1205 13:10:07.990465 4784 generic.go:334] "Generic (PLEG): container finished" podID="8cc34a4e-d0ad-40c2-9398-df6b7a88e717" containerID="72f972e37c05d22b705896f305f01263d9d9fd3e0a059bb5faf675acee3240d7" exitCode=0
Dec 05 13:10:07 crc kubenswrapper[4784]: I1205 13:10:07.990528 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f6kzd" event={"ID":"8cc34a4e-d0ad-40c2-9398-df6b7a88e717","Type":"ContainerDied","Data":"72f972e37c05d22b705896f305f01263d9d9fd3e0a059bb5faf675acee3240d7"}
Dec 05 13:10:10 crc kubenswrapper[4784]: I1205 13:10:10.017983 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f6kzd" event={"ID":"8cc34a4e-d0ad-40c2-9398-df6b7a88e717","Type":"ContainerStarted","Data":"f5eac001e9f2287c801a73e06b2920b7defa590bf846e20155d9cbbebd5e1062"}
Dec 05 13:10:10 crc kubenswrapper[4784]: I1205 13:10:10.048905 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-f6kzd" podStartSLOduration=3.194880776 podStartE2EDuration="6.048874667s" podCreationTimestamp="2025-12-05 13:10:04 +0000 UTC" firstStartedPulling="2025-12-05 13:10:05.964000324 +0000 UTC m=+2685.384067139" lastFinishedPulling="2025-12-05 13:10:08.817994205 +0000 UTC m=+2688.238061030" observedRunningTime="2025-12-05 13:10:10.043163599 +0000 UTC m=+2689.463230424" watchObservedRunningTime="2025-12-05 13:10:10.048874667 +0000 UTC m=+2689.468941522"
Dec 05 13:10:15 crc kubenswrapper[4784]: I1205 13:10:15.127879 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-f6kzd"
Dec 05 13:10:15 crc kubenswrapper[4784]: I1205 13:10:15.128507 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-f6kzd"
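The pod_startup_latency_tracker entry above is internally consistent: podStartE2EDuration (6.048874667s) equals watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration excludes the image-pull window (lastFinishedPulling minus firstStartedPulling, about 2.854s). Checking that arithmetic (the decomposition is an assumption about how the tracker composes these fields, but the logged values agree with it to within tens of nanoseconds):

package main

import (
	"fmt"
	"time"
)

func mustParse(s string) time.Time {
	t, err := time.Parse(time.RFC3339Nano, s)
	if err != nil {
		panic(err)
	}
	return t
}

func main() {
	// Timestamps copied from the log entry above (+0000 UTC written as Z).
	created := mustParse("2025-12-05T13:10:04Z")
	firstPull := mustParse("2025-12-05T13:10:05.964000324Z")
	lastPull := mustParse("2025-12-05T13:10:08.817994205Z")
	running := mustParse("2025-12-05T13:10:10.048874667Z")

	e2e := running.Sub(created)          // 6.048874667s, matching podStartE2EDuration
	slo := e2e - lastPull.Sub(firstPull) // ~3.194880786s, matching podStartSLOduration
	fmt.Println(e2e, slo)
}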
Dec 05 13:10:15 crc kubenswrapper[4784]: I1205 13:10:15.192170 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-f6kzd"
Dec 05 13:10:16 crc kubenswrapper[4784]: I1205 13:10:16.139865 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-f6kzd"
Dec 05 13:10:16 crc kubenswrapper[4784]: I1205 13:10:16.196506 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-f6kzd"]
Dec 05 13:10:18 crc kubenswrapper[4784]: I1205 13:10:18.099441 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-f6kzd" podUID="8cc34a4e-d0ad-40c2-9398-df6b7a88e717" containerName="registry-server" containerID="cri-o://f5eac001e9f2287c801a73e06b2920b7defa590bf846e20155d9cbbebd5e1062" gracePeriod=2
Dec 05 13:10:19 crc kubenswrapper[4784]: I1205 13:10:19.123730 4784 generic.go:334] "Generic (PLEG): container finished" podID="8cc34a4e-d0ad-40c2-9398-df6b7a88e717" containerID="f5eac001e9f2287c801a73e06b2920b7defa590bf846e20155d9cbbebd5e1062" exitCode=0
Dec 05 13:10:19 crc kubenswrapper[4784]: I1205 13:10:19.123780 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f6kzd" event={"ID":"8cc34a4e-d0ad-40c2-9398-df6b7a88e717","Type":"ContainerDied","Data":"f5eac001e9f2287c801a73e06b2920b7defa590bf846e20155d9cbbebd5e1062"}
Dec 05 13:10:19 crc kubenswrapper[4784]: I1205 13:10:19.322534 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-f6kzd"
Dec 05 13:10:19 crc kubenswrapper[4784]: I1205 13:10:19.470338 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cc34a4e-d0ad-40c2-9398-df6b7a88e717-utilities\") pod \"8cc34a4e-d0ad-40c2-9398-df6b7a88e717\" (UID: \"8cc34a4e-d0ad-40c2-9398-df6b7a88e717\") "
Dec 05 13:10:19 crc kubenswrapper[4784]: I1205 13:10:19.470493 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-59bk4\" (UniqueName: \"kubernetes.io/projected/8cc34a4e-d0ad-40c2-9398-df6b7a88e717-kube-api-access-59bk4\") pod \"8cc34a4e-d0ad-40c2-9398-df6b7a88e717\" (UID: \"8cc34a4e-d0ad-40c2-9398-df6b7a88e717\") "
Dec 05 13:10:19 crc kubenswrapper[4784]: I1205 13:10:19.470543 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cc34a4e-d0ad-40c2-9398-df6b7a88e717-catalog-content\") pod \"8cc34a4e-d0ad-40c2-9398-df6b7a88e717\" (UID: \"8cc34a4e-d0ad-40c2-9398-df6b7a88e717\") "
Dec 05 13:10:19 crc kubenswrapper[4784]: I1205 13:10:19.471389 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8cc34a4e-d0ad-40c2-9398-df6b7a88e717-utilities" (OuterVolumeSpecName: "utilities") pod "8cc34a4e-d0ad-40c2-9398-df6b7a88e717" (UID: "8cc34a4e-d0ad-40c2-9398-df6b7a88e717"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 13:10:19 crc kubenswrapper[4784]: I1205 13:10:19.475986 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cc34a4e-d0ad-40c2-9398-df6b7a88e717-kube-api-access-59bk4" (OuterVolumeSpecName: "kube-api-access-59bk4") pod "8cc34a4e-d0ad-40c2-9398-df6b7a88e717" (UID: "8cc34a4e-d0ad-40c2-9398-df6b7a88e717").
InnerVolumeSpecName "kube-api-access-59bk4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:10:19 crc kubenswrapper[4784]: I1205 13:10:19.520849 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8cc34a4e-d0ad-40c2-9398-df6b7a88e717-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8cc34a4e-d0ad-40c2-9398-df6b7a88e717" (UID: "8cc34a4e-d0ad-40c2-9398-df6b7a88e717"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:10:19 crc kubenswrapper[4784]: I1205 13:10:19.573026 4784 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cc34a4e-d0ad-40c2-9398-df6b7a88e717-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 13:10:19 crc kubenswrapper[4784]: I1205 13:10:19.573058 4784 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cc34a4e-d0ad-40c2-9398-df6b7a88e717-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 13:10:19 crc kubenswrapper[4784]: I1205 13:10:19.573068 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-59bk4\" (UniqueName: \"kubernetes.io/projected/8cc34a4e-d0ad-40c2-9398-df6b7a88e717-kube-api-access-59bk4\") on node \"crc\" DevicePath \"\"" Dec 05 13:10:20 crc kubenswrapper[4784]: I1205 13:10:20.143737 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f6kzd" event={"ID":"8cc34a4e-d0ad-40c2-9398-df6b7a88e717","Type":"ContainerDied","Data":"d0b2635b08f216a192c087fb27e25d097adf48bcd733b1f290835c39072a41aa"} Dec 05 13:10:20 crc kubenswrapper[4784]: I1205 13:10:20.144094 4784 scope.go:117] "RemoveContainer" containerID="f5eac001e9f2287c801a73e06b2920b7defa590bf846e20155d9cbbebd5e1062" Dec 05 13:10:20 crc kubenswrapper[4784]: I1205 13:10:20.143888 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-f6kzd"
Dec 05 13:10:20 crc kubenswrapper[4784]: I1205 13:10:20.211579 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-f6kzd"]
Dec 05 13:10:20 crc kubenswrapper[4784]: I1205 13:10:20.217478 4784 scope.go:117] "RemoveContainer" containerID="72f972e37c05d22b705896f305f01263d9d9fd3e0a059bb5faf675acee3240d7"
Dec 05 13:10:20 crc kubenswrapper[4784]: I1205 13:10:20.224506 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-f6kzd"]
Dec 05 13:10:20 crc kubenswrapper[4784]: I1205 13:10:20.253317 4784 scope.go:117] "RemoveContainer" containerID="c1a4c6780c2079dd7ca08065974f3d67aa35615aa7b9547545596b4506afec68"
Dec 05 13:10:21 crc kubenswrapper[4784]: I1205 13:10:21.019269 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cc34a4e-d0ad-40c2-9398-df6b7a88e717" path="/var/lib/kubelet/pods/8cc34a4e-d0ad-40c2-9398-df6b7a88e717/volumes"
Dec 05 13:10:29 crc kubenswrapper[4784]: I1205 13:10:29.572126 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 13:10:29 crc kubenswrapper[4784]: I1205 13:10:29.572849 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 13:10:59 crc kubenswrapper[4784]: I1205 13:10:59.572577 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 13:10:59 crc kubenswrapper[4784]: I1205 13:10:59.573129 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 13:10:59 crc kubenswrapper[4784]: I1205 13:10:59.573171 4784 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm"
Dec 05 13:10:59 crc kubenswrapper[4784]: I1205 13:10:59.573864 4784 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"20782b65b809ce339f6751afbc8faeb14f9a8c58aa3bf376517c44a4241af538"} pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 13:10:59 crc kubenswrapper[4784]: I1205 13:10:59.573918 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" containerID="cri-o://20782b65b809ce339f6751afbc8faeb14f9a8c58aa3bf376517c44a4241af538" gracePeriod=600
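gracePeriod=600 in the kill entry above gives the container up to ten minutes between the initial stop signal and a forced kill; the daemon actually exits almost at once (exitCode=0 at 13:11:00.558846 below). The general shape is SIGTERM first, SIGKILL only on timeout, sketched here against a plain local process (a generic illustration; the real signalling is done by CRI-O against the container's init process, not by code like this):

package main

import (
	"fmt"
	"os/exec"
	"syscall"
	"time"
)

// killWithGracePeriod asks the process to stop with SIGTERM and escalates to
// SIGKILL only if it is still running once the grace period expires.
func killWithGracePeriod(cmd *exec.Cmd, grace time.Duration) {
	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()

	cmd.Process.Signal(syscall.SIGTERM) // polite request, like "Killing container with a grace period"
	select {
	case err := <-done:
		fmt.Println("exited within grace period:", err)
	case <-time.After(grace):
		cmd.Process.Kill() // grace period exhausted: SIGKILL
		<-done
		fmt.Println("killed after grace period")
	}
}

func main() {
	cmd := exec.Command("sleep", "30")
	if err := cmd.Start(); err != nil {
		panic(err)
	}
	// sleep exits promptly on SIGTERM, so this takes the first branch.
	killWithGracePeriod(cmd, 2*time.Second)
}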
Dec 05 13:11:00 crc kubenswrapper[4784]: I1205 13:11:00.558846 4784 generic.go:334] "Generic (PLEG): container finished" podID="be412f31-7a36-4811-8914-be8cdc987d08" containerID="20782b65b809ce339f6751afbc8faeb14f9a8c58aa3bf376517c44a4241af538" exitCode=0
Dec 05 13:11:00 crc kubenswrapper[4784]: I1205 13:11:00.558949 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerDied","Data":"20782b65b809ce339f6751afbc8faeb14f9a8c58aa3bf376517c44a4241af538"}
Dec 05 13:11:00 crc kubenswrapper[4784]: I1205 13:11:00.559513 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerStarted","Data":"f945d817e6fa3117971bcb82d086dfe6a7c65c33ba45f235aec8fd289f80bd9f"}
Dec 05 13:11:00 crc kubenswrapper[4784]: I1205 13:11:00.559546 4784 scope.go:117] "RemoveContainer" containerID="cf372dd1dff8e9ccbb780396921dcc2542b196734ec18e5c9adbd311c71fa1a9"
Dec 05 13:11:34 crc kubenswrapper[4784]: I1205 13:11:34.698834 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-t8ffp"]
Dec 05 13:11:34 crc kubenswrapper[4784]: E1205 13:11:34.701378 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cc34a4e-d0ad-40c2-9398-df6b7a88e717" containerName="extract-utilities"
Dec 05 13:11:34 crc kubenswrapper[4784]: I1205 13:11:34.701419 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cc34a4e-d0ad-40c2-9398-df6b7a88e717" containerName="extract-utilities"
Dec 05 13:11:34 crc kubenswrapper[4784]: E1205 13:11:34.701444 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cc34a4e-d0ad-40c2-9398-df6b7a88e717" containerName="registry-server"
Dec 05 13:11:34 crc kubenswrapper[4784]: I1205 13:11:34.701458 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cc34a4e-d0ad-40c2-9398-df6b7a88e717" containerName="registry-server"
Dec 05 13:11:34 crc kubenswrapper[4784]: E1205 13:11:34.701475 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cc34a4e-d0ad-40c2-9398-df6b7a88e717" containerName="extract-content"
Dec 05 13:11:34 crc kubenswrapper[4784]: I1205 13:11:34.701488 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cc34a4e-d0ad-40c2-9398-df6b7a88e717" containerName="extract-content"
Dec 05 13:11:34 crc kubenswrapper[4784]: I1205 13:11:34.701993 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="8cc34a4e-d0ad-40c2-9398-df6b7a88e717" containerName="registry-server"
Dec 05 13:11:34 crc kubenswrapper[4784]: I1205 13:11:34.719045 4784 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-marketplace/certified-operators-t8ffp" Dec 05 13:11:34 crc kubenswrapper[4784]: I1205 13:11:34.728090 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-t8ffp"] Dec 05 13:11:34 crc kubenswrapper[4784]: I1205 13:11:34.821102 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd6cfe44-236b-4ed1-83ec-6d2b701d8431-catalog-content\") pod \"certified-operators-t8ffp\" (UID: \"dd6cfe44-236b-4ed1-83ec-6d2b701d8431\") " pod="openshift-marketplace/certified-operators-t8ffp" Dec 05 13:11:34 crc kubenswrapper[4784]: I1205 13:11:34.821251 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd6cfe44-236b-4ed1-83ec-6d2b701d8431-utilities\") pod \"certified-operators-t8ffp\" (UID: \"dd6cfe44-236b-4ed1-83ec-6d2b701d8431\") " pod="openshift-marketplace/certified-operators-t8ffp" Dec 05 13:11:34 crc kubenswrapper[4784]: I1205 13:11:34.821673 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8z8tb\" (UniqueName: \"kubernetes.io/projected/dd6cfe44-236b-4ed1-83ec-6d2b701d8431-kube-api-access-8z8tb\") pod \"certified-operators-t8ffp\" (UID: \"dd6cfe44-236b-4ed1-83ec-6d2b701d8431\") " pod="openshift-marketplace/certified-operators-t8ffp" Dec 05 13:11:34 crc kubenswrapper[4784]: I1205 13:11:34.923461 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd6cfe44-236b-4ed1-83ec-6d2b701d8431-catalog-content\") pod \"certified-operators-t8ffp\" (UID: \"dd6cfe44-236b-4ed1-83ec-6d2b701d8431\") " pod="openshift-marketplace/certified-operators-t8ffp" Dec 05 13:11:34 crc kubenswrapper[4784]: I1205 13:11:34.923551 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd6cfe44-236b-4ed1-83ec-6d2b701d8431-utilities\") pod \"certified-operators-t8ffp\" (UID: \"dd6cfe44-236b-4ed1-83ec-6d2b701d8431\") " pod="openshift-marketplace/certified-operators-t8ffp" Dec 05 13:11:34 crc kubenswrapper[4784]: I1205 13:11:34.923706 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8z8tb\" (UniqueName: \"kubernetes.io/projected/dd6cfe44-236b-4ed1-83ec-6d2b701d8431-kube-api-access-8z8tb\") pod \"certified-operators-t8ffp\" (UID: \"dd6cfe44-236b-4ed1-83ec-6d2b701d8431\") " pod="openshift-marketplace/certified-operators-t8ffp" Dec 05 13:11:34 crc kubenswrapper[4784]: I1205 13:11:34.923990 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd6cfe44-236b-4ed1-83ec-6d2b701d8431-utilities\") pod \"certified-operators-t8ffp\" (UID: \"dd6cfe44-236b-4ed1-83ec-6d2b701d8431\") " pod="openshift-marketplace/certified-operators-t8ffp" Dec 05 13:11:34 crc kubenswrapper[4784]: I1205 13:11:34.923989 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd6cfe44-236b-4ed1-83ec-6d2b701d8431-catalog-content\") pod \"certified-operators-t8ffp\" (UID: \"dd6cfe44-236b-4ed1-83ec-6d2b701d8431\") " pod="openshift-marketplace/certified-operators-t8ffp" Dec 05 13:11:34 crc kubenswrapper[4784]: I1205 13:11:34.943582 4784 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-8z8tb\" (UniqueName: \"kubernetes.io/projected/dd6cfe44-236b-4ed1-83ec-6d2b701d8431-kube-api-access-8z8tb\") pod \"certified-operators-t8ffp\" (UID: \"dd6cfe44-236b-4ed1-83ec-6d2b701d8431\") " pod="openshift-marketplace/certified-operators-t8ffp" Dec 05 13:11:35 crc kubenswrapper[4784]: I1205 13:11:35.045138 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t8ffp" Dec 05 13:11:35 crc kubenswrapper[4784]: I1205 13:11:35.563333 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-t8ffp"] Dec 05 13:11:35 crc kubenswrapper[4784]: I1205 13:11:35.934391 4784 generic.go:334] "Generic (PLEG): container finished" podID="dd6cfe44-236b-4ed1-83ec-6d2b701d8431" containerID="59df90c160937cd20de3b7fc2d7e8e23dff40ca069ad3b3f7eb7aa5706d45933" exitCode=0 Dec 05 13:11:35 crc kubenswrapper[4784]: I1205 13:11:35.934457 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t8ffp" event={"ID":"dd6cfe44-236b-4ed1-83ec-6d2b701d8431","Type":"ContainerDied","Data":"59df90c160937cd20de3b7fc2d7e8e23dff40ca069ad3b3f7eb7aa5706d45933"} Dec 05 13:11:35 crc kubenswrapper[4784]: I1205 13:11:35.934806 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t8ffp" event={"ID":"dd6cfe44-236b-4ed1-83ec-6d2b701d8431","Type":"ContainerStarted","Data":"992fca2caccce2270cee3e7c7bafeab8b6615d9e6b60e4b6e0928f3ded27d7dd"} Dec 05 13:11:35 crc kubenswrapper[4784]: I1205 13:11:35.936955 4784 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 13:11:37 crc kubenswrapper[4784]: I1205 13:11:37.963967 4784 generic.go:334] "Generic (PLEG): container finished" podID="dd6cfe44-236b-4ed1-83ec-6d2b701d8431" containerID="0bb30b5e326d8880c91040c5bc65b43e30e3ccd4d1dba14474fcb20c0bb031c3" exitCode=0 Dec 05 13:11:37 crc kubenswrapper[4784]: I1205 13:11:37.964040 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t8ffp" event={"ID":"dd6cfe44-236b-4ed1-83ec-6d2b701d8431","Type":"ContainerDied","Data":"0bb30b5e326d8880c91040c5bc65b43e30e3ccd4d1dba14474fcb20c0bb031c3"} Dec 05 13:11:39 crc kubenswrapper[4784]: I1205 13:11:39.992803 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t8ffp" event={"ID":"dd6cfe44-236b-4ed1-83ec-6d2b701d8431","Type":"ContainerStarted","Data":"950d34aa8fcbd2e5d50fe2439e76647b57d19f7144c22c19a0d7b9fd296a6602"} Dec 05 13:11:40 crc kubenswrapper[4784]: I1205 13:11:40.023078 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-t8ffp" podStartSLOduration=3.133526253 podStartE2EDuration="6.023060165s" podCreationTimestamp="2025-12-05 13:11:34 +0000 UTC" firstStartedPulling="2025-12-05 13:11:35.936639744 +0000 UTC m=+2775.356706559" lastFinishedPulling="2025-12-05 13:11:38.826173646 +0000 UTC m=+2778.246240471" observedRunningTime="2025-12-05 13:11:40.020501885 +0000 UTC m=+2779.440568720" watchObservedRunningTime="2025-12-05 13:11:40.023060165 +0000 UTC m=+2779.443126990" Dec 05 13:11:42 crc kubenswrapper[4784]: I1205 13:11:42.016953 4784 generic.go:334] "Generic (PLEG): container finished" podID="73ebab61-4062-476d-84bc-1013b097d5ac" containerID="672d9cf7030585091a4164e338d34d614e927e555d2cf2874d2ad9dd0269a654" exitCode=0 Dec 05 13:11:42 crc 
kubenswrapper[4784]: I1205 13:11:42.017057 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m6kfz" event={"ID":"73ebab61-4062-476d-84bc-1013b097d5ac","Type":"ContainerDied","Data":"672d9cf7030585091a4164e338d34d614e927e555d2cf2874d2ad9dd0269a654"} Dec 05 13:11:43 crc kubenswrapper[4784]: I1205 13:11:43.500741 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m6kfz" Dec 05 13:11:43 crc kubenswrapper[4784]: I1205 13:11:43.628540 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/73ebab61-4062-476d-84bc-1013b097d5ac-inventory\") pod \"73ebab61-4062-476d-84bc-1013b097d5ac\" (UID: \"73ebab61-4062-476d-84bc-1013b097d5ac\") " Dec 05 13:11:43 crc kubenswrapper[4784]: I1205 13:11:43.628643 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/73ebab61-4062-476d-84bc-1013b097d5ac-ssh-key\") pod \"73ebab61-4062-476d-84bc-1013b097d5ac\" (UID: \"73ebab61-4062-476d-84bc-1013b097d5ac\") " Dec 05 13:11:43 crc kubenswrapper[4784]: I1205 13:11:43.628832 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/73ebab61-4062-476d-84bc-1013b097d5ac-nova-migration-ssh-key-1\") pod \"73ebab61-4062-476d-84bc-1013b097d5ac\" (UID: \"73ebab61-4062-476d-84bc-1013b097d5ac\") " Dec 05 13:11:43 crc kubenswrapper[4784]: I1205 13:11:43.628879 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/73ebab61-4062-476d-84bc-1013b097d5ac-nova-cell1-compute-config-1\") pod \"73ebab61-4062-476d-84bc-1013b097d5ac\" (UID: \"73ebab61-4062-476d-84bc-1013b097d5ac\") " Dec 05 13:11:43 crc kubenswrapper[4784]: I1205 13:11:43.628894 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c2dlx\" (UniqueName: \"kubernetes.io/projected/73ebab61-4062-476d-84bc-1013b097d5ac-kube-api-access-c2dlx\") pod \"73ebab61-4062-476d-84bc-1013b097d5ac\" (UID: \"73ebab61-4062-476d-84bc-1013b097d5ac\") " Dec 05 13:11:43 crc kubenswrapper[4784]: I1205 13:11:43.628993 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/73ebab61-4062-476d-84bc-1013b097d5ac-nova-cell1-compute-config-0\") pod \"73ebab61-4062-476d-84bc-1013b097d5ac\" (UID: \"73ebab61-4062-476d-84bc-1013b097d5ac\") " Dec 05 13:11:43 crc kubenswrapper[4784]: I1205 13:11:43.629053 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73ebab61-4062-476d-84bc-1013b097d5ac-nova-combined-ca-bundle\") pod \"73ebab61-4062-476d-84bc-1013b097d5ac\" (UID: \"73ebab61-4062-476d-84bc-1013b097d5ac\") " Dec 05 13:11:43 crc kubenswrapper[4784]: I1205 13:11:43.629076 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/73ebab61-4062-476d-84bc-1013b097d5ac-nova-extra-config-0\") pod \"73ebab61-4062-476d-84bc-1013b097d5ac\" (UID: \"73ebab61-4062-476d-84bc-1013b097d5ac\") " Dec 05 13:11:43 crc kubenswrapper[4784]: I1205 13:11:43.629119 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/73ebab61-4062-476d-84bc-1013b097d5ac-nova-migration-ssh-key-0\") pod \"73ebab61-4062-476d-84bc-1013b097d5ac\" (UID: \"73ebab61-4062-476d-84bc-1013b097d5ac\") " Dec 05 13:11:43 crc kubenswrapper[4784]: I1205 13:11:43.635758 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/73ebab61-4062-476d-84bc-1013b097d5ac-kube-api-access-c2dlx" (OuterVolumeSpecName: "kube-api-access-c2dlx") pod "73ebab61-4062-476d-84bc-1013b097d5ac" (UID: "73ebab61-4062-476d-84bc-1013b097d5ac"). InnerVolumeSpecName "kube-api-access-c2dlx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:11:43 crc kubenswrapper[4784]: I1205 13:11:43.636827 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73ebab61-4062-476d-84bc-1013b097d5ac-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "73ebab61-4062-476d-84bc-1013b097d5ac" (UID: "73ebab61-4062-476d-84bc-1013b097d5ac"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:11:43 crc kubenswrapper[4784]: I1205 13:11:43.657346 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/73ebab61-4062-476d-84bc-1013b097d5ac-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "73ebab61-4062-476d-84bc-1013b097d5ac" (UID: "73ebab61-4062-476d-84bc-1013b097d5ac"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 13:11:43 crc kubenswrapper[4784]: I1205 13:11:43.659590 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73ebab61-4062-476d-84bc-1013b097d5ac-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "73ebab61-4062-476d-84bc-1013b097d5ac" (UID: "73ebab61-4062-476d-84bc-1013b097d5ac"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:11:43 crc kubenswrapper[4784]: I1205 13:11:43.666891 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73ebab61-4062-476d-84bc-1013b097d5ac-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "73ebab61-4062-476d-84bc-1013b097d5ac" (UID: "73ebab61-4062-476d-84bc-1013b097d5ac"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:11:43 crc kubenswrapper[4784]: I1205 13:11:43.667289 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73ebab61-4062-476d-84bc-1013b097d5ac-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "73ebab61-4062-476d-84bc-1013b097d5ac" (UID: "73ebab61-4062-476d-84bc-1013b097d5ac"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:11:43 crc kubenswrapper[4784]: I1205 13:11:43.670106 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73ebab61-4062-476d-84bc-1013b097d5ac-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "73ebab61-4062-476d-84bc-1013b097d5ac" (UID: "73ebab61-4062-476d-84bc-1013b097d5ac"). InnerVolumeSpecName "nova-migration-ssh-key-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:11:43 crc kubenswrapper[4784]: I1205 13:11:43.672343 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73ebab61-4062-476d-84bc-1013b097d5ac-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "73ebab61-4062-476d-84bc-1013b097d5ac" (UID: "73ebab61-4062-476d-84bc-1013b097d5ac"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:11:43 crc kubenswrapper[4784]: I1205 13:11:43.679575 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73ebab61-4062-476d-84bc-1013b097d5ac-inventory" (OuterVolumeSpecName: "inventory") pod "73ebab61-4062-476d-84bc-1013b097d5ac" (UID: "73ebab61-4062-476d-84bc-1013b097d5ac"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:11:43 crc kubenswrapper[4784]: I1205 13:11:43.731416 4784 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/73ebab61-4062-476d-84bc-1013b097d5ac-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Dec 05 13:11:43 crc kubenswrapper[4784]: I1205 13:11:43.731463 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c2dlx\" (UniqueName: \"kubernetes.io/projected/73ebab61-4062-476d-84bc-1013b097d5ac-kube-api-access-c2dlx\") on node \"crc\" DevicePath \"\"" Dec 05 13:11:43 crc kubenswrapper[4784]: I1205 13:11:43.731472 4784 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/73ebab61-4062-476d-84bc-1013b097d5ac-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 13:11:43 crc kubenswrapper[4784]: I1205 13:11:43.731482 4784 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73ebab61-4062-476d-84bc-1013b097d5ac-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 13:11:43 crc kubenswrapper[4784]: I1205 13:11:43.731493 4784 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/73ebab61-4062-476d-84bc-1013b097d5ac-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 13:11:43 crc kubenswrapper[4784]: I1205 13:11:43.731501 4784 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/73ebab61-4062-476d-84bc-1013b097d5ac-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Dec 05 13:11:43 crc kubenswrapper[4784]: I1205 13:11:43.731510 4784 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/73ebab61-4062-476d-84bc-1013b097d5ac-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 13:11:43 crc kubenswrapper[4784]: I1205 13:11:43.731521 4784 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/73ebab61-4062-476d-84bc-1013b097d5ac-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 13:11:43 crc kubenswrapper[4784]: I1205 13:11:43.731532 4784 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/73ebab61-4062-476d-84bc-1013b097d5ac-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Dec 05 13:11:44 crc kubenswrapper[4784]: I1205 13:11:44.038476 4784 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m6kfz" event={"ID":"73ebab61-4062-476d-84bc-1013b097d5ac","Type":"ContainerDied","Data":"e77ab5e307ead1004cafb1a29973ea15fe0bea3f0bfca3e3581fc48000996df7"} Dec 05 13:11:44 crc kubenswrapper[4784]: I1205 13:11:44.038513 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e77ab5e307ead1004cafb1a29973ea15fe0bea3f0bfca3e3581fc48000996df7" Dec 05 13:11:44 crc kubenswrapper[4784]: I1205 13:11:44.038608 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m6kfz" Dec 05 13:11:44 crc kubenswrapper[4784]: I1205 13:11:44.240484 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2"] Dec 05 13:11:44 crc kubenswrapper[4784]: E1205 13:11:44.241023 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73ebab61-4062-476d-84bc-1013b097d5ac" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 05 13:11:44 crc kubenswrapper[4784]: I1205 13:11:44.241047 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="73ebab61-4062-476d-84bc-1013b097d5ac" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 05 13:11:44 crc kubenswrapper[4784]: I1205 13:11:44.241335 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="73ebab61-4062-476d-84bc-1013b097d5ac" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 05 13:11:44 crc kubenswrapper[4784]: I1205 13:11:44.242058 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2" Dec 05 13:11:44 crc kubenswrapper[4784]: I1205 13:11:44.245225 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 13:11:44 crc kubenswrapper[4784]: I1205 13:11:44.245274 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 13:11:44 crc kubenswrapper[4784]: I1205 13:11:44.245292 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-lwfzg" Dec 05 13:11:44 crc kubenswrapper[4784]: I1205 13:11:44.245235 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Dec 05 13:11:44 crc kubenswrapper[4784]: I1205 13:11:44.245691 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 13:11:44 crc kubenswrapper[4784]: I1205 13:11:44.255765 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2"] Dec 05 13:11:44 crc kubenswrapper[4784]: I1205 13:11:44.343546 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/3c4b4608-406d-431c-a042-bd54eb2643f9-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2\" (UID: \"3c4b4608-406d-431c-a042-bd54eb2643f9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2" Dec 05 13:11:44 crc kubenswrapper[4784]: I1205 13:11:44.343607 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3c4b4608-406d-431c-a042-bd54eb2643f9-ssh-key\") pod 
\"telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2\" (UID: \"3c4b4608-406d-431c-a042-bd54eb2643f9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2" Dec 05 13:11:44 crc kubenswrapper[4784]: I1205 13:11:44.343669 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmwpc\" (UniqueName: \"kubernetes.io/projected/3c4b4608-406d-431c-a042-bd54eb2643f9-kube-api-access-mmwpc\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2\" (UID: \"3c4b4608-406d-431c-a042-bd54eb2643f9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2" Dec 05 13:11:44 crc kubenswrapper[4784]: I1205 13:11:44.343709 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3c4b4608-406d-431c-a042-bd54eb2643f9-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2\" (UID: \"3c4b4608-406d-431c-a042-bd54eb2643f9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2" Dec 05 13:11:44 crc kubenswrapper[4784]: I1205 13:11:44.343756 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/3c4b4608-406d-431c-a042-bd54eb2643f9-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2\" (UID: \"3c4b4608-406d-431c-a042-bd54eb2643f9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2" Dec 05 13:11:44 crc kubenswrapper[4784]: I1205 13:11:44.343773 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/3c4b4608-406d-431c-a042-bd54eb2643f9-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2\" (UID: \"3c4b4608-406d-431c-a042-bd54eb2643f9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2" Dec 05 13:11:44 crc kubenswrapper[4784]: I1205 13:11:44.343789 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c4b4608-406d-431c-a042-bd54eb2643f9-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2\" (UID: \"3c4b4608-406d-431c-a042-bd54eb2643f9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2" Dec 05 13:11:44 crc kubenswrapper[4784]: I1205 13:11:44.445681 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/3c4b4608-406d-431c-a042-bd54eb2643f9-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2\" (UID: \"3c4b4608-406d-431c-a042-bd54eb2643f9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2" Dec 05 13:11:44 crc kubenswrapper[4784]: I1205 13:11:44.445764 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3c4b4608-406d-431c-a042-bd54eb2643f9-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2\" (UID: \"3c4b4608-406d-431c-a042-bd54eb2643f9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2" Dec 05 13:11:44 crc kubenswrapper[4784]: I1205 13:11:44.445850 4784 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"kube-api-access-mmwpc\" (UniqueName: \"kubernetes.io/projected/3c4b4608-406d-431c-a042-bd54eb2643f9-kube-api-access-mmwpc\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2\" (UID: \"3c4b4608-406d-431c-a042-bd54eb2643f9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2" Dec 05 13:11:44 crc kubenswrapper[4784]: I1205 13:11:44.445904 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3c4b4608-406d-431c-a042-bd54eb2643f9-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2\" (UID: \"3c4b4608-406d-431c-a042-bd54eb2643f9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2" Dec 05 13:11:44 crc kubenswrapper[4784]: I1205 13:11:44.445968 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/3c4b4608-406d-431c-a042-bd54eb2643f9-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2\" (UID: \"3c4b4608-406d-431c-a042-bd54eb2643f9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2" Dec 05 13:11:44 crc kubenswrapper[4784]: I1205 13:11:44.445996 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/3c4b4608-406d-431c-a042-bd54eb2643f9-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2\" (UID: \"3c4b4608-406d-431c-a042-bd54eb2643f9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2" Dec 05 13:11:44 crc kubenswrapper[4784]: I1205 13:11:44.446023 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c4b4608-406d-431c-a042-bd54eb2643f9-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2\" (UID: \"3c4b4608-406d-431c-a042-bd54eb2643f9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2" Dec 05 13:11:44 crc kubenswrapper[4784]: I1205 13:11:44.449581 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/3c4b4608-406d-431c-a042-bd54eb2643f9-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2\" (UID: \"3c4b4608-406d-431c-a042-bd54eb2643f9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2" Dec 05 13:11:44 crc kubenswrapper[4784]: I1205 13:11:44.451168 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/3c4b4608-406d-431c-a042-bd54eb2643f9-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2\" (UID: \"3c4b4608-406d-431c-a042-bd54eb2643f9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2" Dec 05 13:11:44 crc kubenswrapper[4784]: I1205 13:11:44.451321 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c4b4608-406d-431c-a042-bd54eb2643f9-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2\" (UID: \"3c4b4608-406d-431c-a042-bd54eb2643f9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2" Dec 05 13:11:44 crc kubenswrapper[4784]: I1205 
13:11:44.451949 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3c4b4608-406d-431c-a042-bd54eb2643f9-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2\" (UID: \"3c4b4608-406d-431c-a042-bd54eb2643f9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2" Dec 05 13:11:44 crc kubenswrapper[4784]: I1205 13:11:44.452728 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/3c4b4608-406d-431c-a042-bd54eb2643f9-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2\" (UID: \"3c4b4608-406d-431c-a042-bd54eb2643f9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2" Dec 05 13:11:44 crc kubenswrapper[4784]: I1205 13:11:44.453515 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3c4b4608-406d-431c-a042-bd54eb2643f9-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2\" (UID: \"3c4b4608-406d-431c-a042-bd54eb2643f9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2" Dec 05 13:11:44 crc kubenswrapper[4784]: I1205 13:11:44.464832 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mmwpc\" (UniqueName: \"kubernetes.io/projected/3c4b4608-406d-431c-a042-bd54eb2643f9-kube-api-access-mmwpc\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2\" (UID: \"3c4b4608-406d-431c-a042-bd54eb2643f9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2" Dec 05 13:11:44 crc kubenswrapper[4784]: I1205 13:11:44.560897 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2" Dec 05 13:11:45 crc kubenswrapper[4784]: I1205 13:11:45.045390 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-t8ffp" Dec 05 13:11:45 crc kubenswrapper[4784]: I1205 13:11:45.045869 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-t8ffp" Dec 05 13:11:45 crc kubenswrapper[4784]: I1205 13:11:45.109978 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-t8ffp" Dec 05 13:11:45 crc kubenswrapper[4784]: I1205 13:11:45.123307 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2"] Dec 05 13:11:46 crc kubenswrapper[4784]: I1205 13:11:46.060498 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2" event={"ID":"3c4b4608-406d-431c-a042-bd54eb2643f9","Type":"ContainerStarted","Data":"e583515379998830b1302d702f94c2372c5c3493e049e0f1c237fdccc5862e60"} Dec 05 13:11:46 crc kubenswrapper[4784]: I1205 13:11:46.060991 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2" event={"ID":"3c4b4608-406d-431c-a042-bd54eb2643f9","Type":"ContainerStarted","Data":"005c0ce1bcd4d7f8e3fbf53d623470289be13f4303919d629380d95583fbe208"} Dec 05 13:11:46 crc kubenswrapper[4784]: I1205 13:11:46.094089 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2" podStartSLOduration=1.65488111 podStartE2EDuration="2.094062835s" podCreationTimestamp="2025-12-05 13:11:44 +0000 UTC" firstStartedPulling="2025-12-05 13:11:45.116854437 +0000 UTC m=+2784.536921252" lastFinishedPulling="2025-12-05 13:11:45.556036162 +0000 UTC m=+2784.976102977" observedRunningTime="2025-12-05 13:11:46.086036805 +0000 UTC m=+2785.506103640" watchObservedRunningTime="2025-12-05 13:11:46.094062835 +0000 UTC m=+2785.514129650" Dec 05 13:11:46 crc kubenswrapper[4784]: I1205 13:11:46.133025 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-t8ffp" Dec 05 13:11:46 crc kubenswrapper[4784]: I1205 13:11:46.196745 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-t8ffp"] Dec 05 13:11:48 crc kubenswrapper[4784]: I1205 13:11:48.079448 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-t8ffp" podUID="dd6cfe44-236b-4ed1-83ec-6d2b701d8431" containerName="registry-server" containerID="cri-o://950d34aa8fcbd2e5d50fe2439e76647b57d19f7144c22c19a0d7b9fd296a6602" gracePeriod=2 Dec 05 13:11:48 crc kubenswrapper[4784]: I1205 13:11:48.698393 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-t8ffp" Dec 05 13:11:48 crc kubenswrapper[4784]: I1205 13:11:48.870527 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd6cfe44-236b-4ed1-83ec-6d2b701d8431-utilities\") pod \"dd6cfe44-236b-4ed1-83ec-6d2b701d8431\" (UID: \"dd6cfe44-236b-4ed1-83ec-6d2b701d8431\") " Dec 05 13:11:48 crc kubenswrapper[4784]: I1205 13:11:48.870677 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd6cfe44-236b-4ed1-83ec-6d2b701d8431-catalog-content\") pod \"dd6cfe44-236b-4ed1-83ec-6d2b701d8431\" (UID: \"dd6cfe44-236b-4ed1-83ec-6d2b701d8431\") " Dec 05 13:11:48 crc kubenswrapper[4784]: I1205 13:11:48.870856 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8z8tb\" (UniqueName: \"kubernetes.io/projected/dd6cfe44-236b-4ed1-83ec-6d2b701d8431-kube-api-access-8z8tb\") pod \"dd6cfe44-236b-4ed1-83ec-6d2b701d8431\" (UID: \"dd6cfe44-236b-4ed1-83ec-6d2b701d8431\") " Dec 05 13:11:48 crc kubenswrapper[4784]: I1205 13:11:48.871545 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd6cfe44-236b-4ed1-83ec-6d2b701d8431-utilities" (OuterVolumeSpecName: "utilities") pod "dd6cfe44-236b-4ed1-83ec-6d2b701d8431" (UID: "dd6cfe44-236b-4ed1-83ec-6d2b701d8431"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:11:48 crc kubenswrapper[4784]: I1205 13:11:48.876873 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd6cfe44-236b-4ed1-83ec-6d2b701d8431-kube-api-access-8z8tb" (OuterVolumeSpecName: "kube-api-access-8z8tb") pod "dd6cfe44-236b-4ed1-83ec-6d2b701d8431" (UID: "dd6cfe44-236b-4ed1-83ec-6d2b701d8431"). InnerVolumeSpecName "kube-api-access-8z8tb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:11:48 crc kubenswrapper[4784]: I1205 13:11:48.936037 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd6cfe44-236b-4ed1-83ec-6d2b701d8431-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dd6cfe44-236b-4ed1-83ec-6d2b701d8431" (UID: "dd6cfe44-236b-4ed1-83ec-6d2b701d8431"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:11:48 crc kubenswrapper[4784]: I1205 13:11:48.973299 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8z8tb\" (UniqueName: \"kubernetes.io/projected/dd6cfe44-236b-4ed1-83ec-6d2b701d8431-kube-api-access-8z8tb\") on node \"crc\" DevicePath \"\"" Dec 05 13:11:48 crc kubenswrapper[4784]: I1205 13:11:48.973590 4784 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd6cfe44-236b-4ed1-83ec-6d2b701d8431-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 13:11:48 crc kubenswrapper[4784]: I1205 13:11:48.973602 4784 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd6cfe44-236b-4ed1-83ec-6d2b701d8431-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 13:11:49 crc kubenswrapper[4784]: I1205 13:11:49.091559 4784 generic.go:334] "Generic (PLEG): container finished" podID="dd6cfe44-236b-4ed1-83ec-6d2b701d8431" containerID="950d34aa8fcbd2e5d50fe2439e76647b57d19f7144c22c19a0d7b9fd296a6602" exitCode=0 Dec 05 13:11:49 crc kubenswrapper[4784]: I1205 13:11:49.091637 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t8ffp" event={"ID":"dd6cfe44-236b-4ed1-83ec-6d2b701d8431","Type":"ContainerDied","Data":"950d34aa8fcbd2e5d50fe2439e76647b57d19f7144c22c19a0d7b9fd296a6602"} Dec 05 13:11:49 crc kubenswrapper[4784]: I1205 13:11:49.091677 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t8ffp" event={"ID":"dd6cfe44-236b-4ed1-83ec-6d2b701d8431","Type":"ContainerDied","Data":"992fca2caccce2270cee3e7c7bafeab8b6615d9e6b60e4b6e0928f3ded27d7dd"} Dec 05 13:11:49 crc kubenswrapper[4784]: I1205 13:11:49.091700 4784 scope.go:117] "RemoveContainer" containerID="950d34aa8fcbd2e5d50fe2439e76647b57d19f7144c22c19a0d7b9fd296a6602" Dec 05 13:11:49 crc kubenswrapper[4784]: I1205 13:11:49.091878 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-t8ffp" Dec 05 13:11:49 crc kubenswrapper[4784]: I1205 13:11:49.117375 4784 scope.go:117] "RemoveContainer" containerID="0bb30b5e326d8880c91040c5bc65b43e30e3ccd4d1dba14474fcb20c0bb031c3" Dec 05 13:11:49 crc kubenswrapper[4784]: I1205 13:11:49.130320 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-t8ffp"] Dec 05 13:11:49 crc kubenswrapper[4784]: I1205 13:11:49.139221 4784 scope.go:117] "RemoveContainer" containerID="59df90c160937cd20de3b7fc2d7e8e23dff40ca069ad3b3f7eb7aa5706d45933" Dec 05 13:11:49 crc kubenswrapper[4784]: I1205 13:11:49.147243 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-t8ffp"] Dec 05 13:11:49 crc kubenswrapper[4784]: I1205 13:11:49.186336 4784 scope.go:117] "RemoveContainer" containerID="950d34aa8fcbd2e5d50fe2439e76647b57d19f7144c22c19a0d7b9fd296a6602" Dec 05 13:11:49 crc kubenswrapper[4784]: E1205 13:11:49.186776 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"950d34aa8fcbd2e5d50fe2439e76647b57d19f7144c22c19a0d7b9fd296a6602\": container with ID starting with 950d34aa8fcbd2e5d50fe2439e76647b57d19f7144c22c19a0d7b9fd296a6602 not found: ID does not exist" containerID="950d34aa8fcbd2e5d50fe2439e76647b57d19f7144c22c19a0d7b9fd296a6602" Dec 05 13:11:49 crc kubenswrapper[4784]: I1205 13:11:49.186812 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"950d34aa8fcbd2e5d50fe2439e76647b57d19f7144c22c19a0d7b9fd296a6602"} err="failed to get container status \"950d34aa8fcbd2e5d50fe2439e76647b57d19f7144c22c19a0d7b9fd296a6602\": rpc error: code = NotFound desc = could not find container \"950d34aa8fcbd2e5d50fe2439e76647b57d19f7144c22c19a0d7b9fd296a6602\": container with ID starting with 950d34aa8fcbd2e5d50fe2439e76647b57d19f7144c22c19a0d7b9fd296a6602 not found: ID does not exist" Dec 05 13:11:49 crc kubenswrapper[4784]: I1205 13:11:49.186836 4784 scope.go:117] "RemoveContainer" containerID="0bb30b5e326d8880c91040c5bc65b43e30e3ccd4d1dba14474fcb20c0bb031c3" Dec 05 13:11:49 crc kubenswrapper[4784]: E1205 13:11:49.187123 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0bb30b5e326d8880c91040c5bc65b43e30e3ccd4d1dba14474fcb20c0bb031c3\": container with ID starting with 0bb30b5e326d8880c91040c5bc65b43e30e3ccd4d1dba14474fcb20c0bb031c3 not found: ID does not exist" containerID="0bb30b5e326d8880c91040c5bc65b43e30e3ccd4d1dba14474fcb20c0bb031c3" Dec 05 13:11:49 crc kubenswrapper[4784]: I1205 13:11:49.187153 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0bb30b5e326d8880c91040c5bc65b43e30e3ccd4d1dba14474fcb20c0bb031c3"} err="failed to get container status \"0bb30b5e326d8880c91040c5bc65b43e30e3ccd4d1dba14474fcb20c0bb031c3\": rpc error: code = NotFound desc = could not find container \"0bb30b5e326d8880c91040c5bc65b43e30e3ccd4d1dba14474fcb20c0bb031c3\": container with ID starting with 0bb30b5e326d8880c91040c5bc65b43e30e3ccd4d1dba14474fcb20c0bb031c3 not found: ID does not exist" Dec 05 13:11:49 crc kubenswrapper[4784]: I1205 13:11:49.187174 4784 scope.go:117] "RemoveContainer" containerID="59df90c160937cd20de3b7fc2d7e8e23dff40ca069ad3b3f7eb7aa5706d45933" Dec 05 13:11:49 crc kubenswrapper[4784]: E1205 13:11:49.187526 4784 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"59df90c160937cd20de3b7fc2d7e8e23dff40ca069ad3b3f7eb7aa5706d45933\": container with ID starting with 59df90c160937cd20de3b7fc2d7e8e23dff40ca069ad3b3f7eb7aa5706d45933 not found: ID does not exist" containerID="59df90c160937cd20de3b7fc2d7e8e23dff40ca069ad3b3f7eb7aa5706d45933" Dec 05 13:11:49 crc kubenswrapper[4784]: I1205 13:11:49.187585 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59df90c160937cd20de3b7fc2d7e8e23dff40ca069ad3b3f7eb7aa5706d45933"} err="failed to get container status \"59df90c160937cd20de3b7fc2d7e8e23dff40ca069ad3b3f7eb7aa5706d45933\": rpc error: code = NotFound desc = could not find container \"59df90c160937cd20de3b7fc2d7e8e23dff40ca069ad3b3f7eb7aa5706d45933\": container with ID starting with 59df90c160937cd20de3b7fc2d7e8e23dff40ca069ad3b3f7eb7aa5706d45933 not found: ID does not exist" Dec 05 13:11:51 crc kubenswrapper[4784]: I1205 13:11:51.016221 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd6cfe44-236b-4ed1-83ec-6d2b701d8431" path="/var/lib/kubelet/pods/dd6cfe44-236b-4ed1-83ec-6d2b701d8431/volumes" Dec 05 13:12:59 crc kubenswrapper[4784]: I1205 13:12:59.572956 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 13:12:59 crc kubenswrapper[4784]: I1205 13:12:59.573462 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 13:13:29 crc kubenswrapper[4784]: I1205 13:13:29.572756 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 13:13:29 crc kubenswrapper[4784]: I1205 13:13:29.573515 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 13:13:59 crc kubenswrapper[4784]: I1205 13:13:59.572688 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 13:13:59 crc kubenswrapper[4784]: I1205 13:13:59.573323 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 13:13:59 crc kubenswrapper[4784]: I1205 13:13:59.573378 4784 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" Dec 05 13:13:59 crc kubenswrapper[4784]: I1205 13:13:59.574248 4784 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f945d817e6fa3117971bcb82d086dfe6a7c65c33ba45f235aec8fd289f80bd9f"} pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 13:13:59 crc kubenswrapper[4784]: I1205 13:13:59.574317 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" containerID="cri-o://f945d817e6fa3117971bcb82d086dfe6a7c65c33ba45f235aec8fd289f80bd9f" gracePeriod=600 Dec 05 13:14:00 crc kubenswrapper[4784]: E1205 13:14:00.218705 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:14:00 crc kubenswrapper[4784]: I1205 13:14:00.490800 4784 generic.go:334] "Generic (PLEG): container finished" podID="be412f31-7a36-4811-8914-be8cdc987d08" containerID="f945d817e6fa3117971bcb82d086dfe6a7c65c33ba45f235aec8fd289f80bd9f" exitCode=0 Dec 05 13:14:00 crc kubenswrapper[4784]: I1205 13:14:00.490850 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerDied","Data":"f945d817e6fa3117971bcb82d086dfe6a7c65c33ba45f235aec8fd289f80bd9f"} Dec 05 13:14:00 crc kubenswrapper[4784]: I1205 13:14:00.490886 4784 scope.go:117] "RemoveContainer" containerID="20782b65b809ce339f6751afbc8faeb14f9a8c58aa3bf376517c44a4241af538" Dec 05 13:14:00 crc kubenswrapper[4784]: I1205 13:14:00.491695 4784 scope.go:117] "RemoveContainer" containerID="f945d817e6fa3117971bcb82d086dfe6a7c65c33ba45f235aec8fd289f80bd9f" Dec 05 13:14:00 crc kubenswrapper[4784]: E1205 13:14:00.491988 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:14:05 crc kubenswrapper[4784]: I1205 13:14:05.550825 4784 generic.go:334] "Generic (PLEG): container finished" podID="3c4b4608-406d-431c-a042-bd54eb2643f9" containerID="e583515379998830b1302d702f94c2372c5c3493e049e0f1c237fdccc5862e60" exitCode=0 Dec 05 13:14:05 crc kubenswrapper[4784]: I1205 13:14:05.550951 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2" event={"ID":"3c4b4608-406d-431c-a042-bd54eb2643f9","Type":"ContainerDied","Data":"e583515379998830b1302d702f94c2372c5c3493e049e0f1c237fdccc5862e60"} Dec 05 13:14:07 crc kubenswrapper[4784]: I1205 13:14:07.188875 4784 util.go:48] "No ready sandbox for pod can be 
found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2" Dec 05 13:14:07 crc kubenswrapper[4784]: I1205 13:14:07.325114 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c4b4608-406d-431c-a042-bd54eb2643f9-telemetry-combined-ca-bundle\") pod \"3c4b4608-406d-431c-a042-bd54eb2643f9\" (UID: \"3c4b4608-406d-431c-a042-bd54eb2643f9\") " Dec 05 13:14:07 crc kubenswrapper[4784]: I1205 13:14:07.325185 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3c4b4608-406d-431c-a042-bd54eb2643f9-ssh-key\") pod \"3c4b4608-406d-431c-a042-bd54eb2643f9\" (UID: \"3c4b4608-406d-431c-a042-bd54eb2643f9\") " Dec 05 13:14:07 crc kubenswrapper[4784]: I1205 13:14:07.325424 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/3c4b4608-406d-431c-a042-bd54eb2643f9-ceilometer-compute-config-data-2\") pod \"3c4b4608-406d-431c-a042-bd54eb2643f9\" (UID: \"3c4b4608-406d-431c-a042-bd54eb2643f9\") " Dec 05 13:14:07 crc kubenswrapper[4784]: I1205 13:14:07.325500 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/3c4b4608-406d-431c-a042-bd54eb2643f9-ceilometer-compute-config-data-1\") pod \"3c4b4608-406d-431c-a042-bd54eb2643f9\" (UID: \"3c4b4608-406d-431c-a042-bd54eb2643f9\") " Dec 05 13:14:07 crc kubenswrapper[4784]: I1205 13:14:07.325540 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mmwpc\" (UniqueName: \"kubernetes.io/projected/3c4b4608-406d-431c-a042-bd54eb2643f9-kube-api-access-mmwpc\") pod \"3c4b4608-406d-431c-a042-bd54eb2643f9\" (UID: \"3c4b4608-406d-431c-a042-bd54eb2643f9\") " Dec 05 13:14:07 crc kubenswrapper[4784]: I1205 13:14:07.325689 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/3c4b4608-406d-431c-a042-bd54eb2643f9-ceilometer-compute-config-data-0\") pod \"3c4b4608-406d-431c-a042-bd54eb2643f9\" (UID: \"3c4b4608-406d-431c-a042-bd54eb2643f9\") " Dec 05 13:14:07 crc kubenswrapper[4784]: I1205 13:14:07.325763 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3c4b4608-406d-431c-a042-bd54eb2643f9-inventory\") pod \"3c4b4608-406d-431c-a042-bd54eb2643f9\" (UID: \"3c4b4608-406d-431c-a042-bd54eb2643f9\") " Dec 05 13:14:07 crc kubenswrapper[4784]: I1205 13:14:07.331842 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c4b4608-406d-431c-a042-bd54eb2643f9-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "3c4b4608-406d-431c-a042-bd54eb2643f9" (UID: "3c4b4608-406d-431c-a042-bd54eb2643f9"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:14:07 crc kubenswrapper[4784]: I1205 13:14:07.332669 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c4b4608-406d-431c-a042-bd54eb2643f9-kube-api-access-mmwpc" (OuterVolumeSpecName: "kube-api-access-mmwpc") pod "3c4b4608-406d-431c-a042-bd54eb2643f9" (UID: "3c4b4608-406d-431c-a042-bd54eb2643f9"). 
InnerVolumeSpecName "kube-api-access-mmwpc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:14:07 crc kubenswrapper[4784]: I1205 13:14:07.356623 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c4b4608-406d-431c-a042-bd54eb2643f9-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3c4b4608-406d-431c-a042-bd54eb2643f9" (UID: "3c4b4608-406d-431c-a042-bd54eb2643f9"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:14:07 crc kubenswrapper[4784]: I1205 13:14:07.356634 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c4b4608-406d-431c-a042-bd54eb2643f9-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "3c4b4608-406d-431c-a042-bd54eb2643f9" (UID: "3c4b4608-406d-431c-a042-bd54eb2643f9"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:14:07 crc kubenswrapper[4784]: I1205 13:14:07.369036 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c4b4608-406d-431c-a042-bd54eb2643f9-inventory" (OuterVolumeSpecName: "inventory") pod "3c4b4608-406d-431c-a042-bd54eb2643f9" (UID: "3c4b4608-406d-431c-a042-bd54eb2643f9"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:14:07 crc kubenswrapper[4784]: I1205 13:14:07.370180 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c4b4608-406d-431c-a042-bd54eb2643f9-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "3c4b4608-406d-431c-a042-bd54eb2643f9" (UID: "3c4b4608-406d-431c-a042-bd54eb2643f9"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:14:07 crc kubenswrapper[4784]: I1205 13:14:07.385680 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c4b4608-406d-431c-a042-bd54eb2643f9-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "3c4b4608-406d-431c-a042-bd54eb2643f9" (UID: "3c4b4608-406d-431c-a042-bd54eb2643f9"). InnerVolumeSpecName "ceilometer-compute-config-data-2". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:14:07 crc kubenswrapper[4784]: I1205 13:14:07.428388 4784 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/3c4b4608-406d-431c-a042-bd54eb2643f9-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Dec 05 13:14:07 crc kubenswrapper[4784]: I1205 13:14:07.428432 4784 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3c4b4608-406d-431c-a042-bd54eb2643f9-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 13:14:07 crc kubenswrapper[4784]: I1205 13:14:07.428447 4784 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c4b4608-406d-431c-a042-bd54eb2643f9-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 13:14:07 crc kubenswrapper[4784]: I1205 13:14:07.428463 4784 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3c4b4608-406d-431c-a042-bd54eb2643f9-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 13:14:07 crc kubenswrapper[4784]: I1205 13:14:07.428509 4784 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/3c4b4608-406d-431c-a042-bd54eb2643f9-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Dec 05 13:14:07 crc kubenswrapper[4784]: I1205 13:14:07.428523 4784 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/3c4b4608-406d-431c-a042-bd54eb2643f9-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Dec 05 13:14:07 crc kubenswrapper[4784]: I1205 13:14:07.428536 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mmwpc\" (UniqueName: \"kubernetes.io/projected/3c4b4608-406d-431c-a042-bd54eb2643f9-kube-api-access-mmwpc\") on node \"crc\" DevicePath \"\"" Dec 05 13:14:07 crc kubenswrapper[4784]: I1205 13:14:07.569631 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2" event={"ID":"3c4b4608-406d-431c-a042-bd54eb2643f9","Type":"ContainerDied","Data":"005c0ce1bcd4d7f8e3fbf53d623470289be13f4303919d629380d95583fbe208"} Dec 05 13:14:07 crc kubenswrapper[4784]: I1205 13:14:07.569669 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="005c0ce1bcd4d7f8e3fbf53d623470289be13f4303919d629380d95583fbe208" Dec 05 13:14:07 crc kubenswrapper[4784]: I1205 13:14:07.569706 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2" Dec 05 13:14:11 crc kubenswrapper[4784]: I1205 13:14:11.014023 4784 scope.go:117] "RemoveContainer" containerID="f945d817e6fa3117971bcb82d086dfe6a7c65c33ba45f235aec8fd289f80bd9f" Dec 05 13:14:11 crc kubenswrapper[4784]: E1205 13:14:11.014926 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:14:22 crc kubenswrapper[4784]: I1205 13:14:22.000326 4784 scope.go:117] "RemoveContainer" containerID="f945d817e6fa3117971bcb82d086dfe6a7c65c33ba45f235aec8fd289f80bd9f" Dec 05 13:14:22 crc kubenswrapper[4784]: E1205 13:14:22.001154 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:14:36 crc kubenswrapper[4784]: I1205 13:14:36.998682 4784 scope.go:117] "RemoveContainer" containerID="f945d817e6fa3117971bcb82d086dfe6a7c65c33ba45f235aec8fd289f80bd9f" Dec 05 13:14:37 crc kubenswrapper[4784]: E1205 13:14:36.999305 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.323432 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-backup-0"] Dec 05 13:14:44 crc kubenswrapper[4784]: E1205 13:14:44.324313 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd6cfe44-236b-4ed1-83ec-6d2b701d8431" containerName="extract-content" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.324329 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd6cfe44-236b-4ed1-83ec-6d2b701d8431" containerName="extract-content" Dec 05 13:14:44 crc kubenswrapper[4784]: E1205 13:14:44.324339 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c4b4608-406d-431c-a042-bd54eb2643f9" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.324355 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c4b4608-406d-431c-a042-bd54eb2643f9" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 05 13:14:44 crc kubenswrapper[4784]: E1205 13:14:44.324388 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd6cfe44-236b-4ed1-83ec-6d2b701d8431" containerName="registry-server" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.324398 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd6cfe44-236b-4ed1-83ec-6d2b701d8431" containerName="registry-server" Dec 05 13:14:44 crc kubenswrapper[4784]: E1205 
13:14:44.324443 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd6cfe44-236b-4ed1-83ec-6d2b701d8431" containerName="extract-utilities" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.324451 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd6cfe44-236b-4ed1-83ec-6d2b701d8431" containerName="extract-utilities" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.324687 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c4b4608-406d-431c-a042-bd54eb2643f9" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.324701 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd6cfe44-236b-4ed1-83ec-6d2b701d8431" containerName="registry-server" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.326308 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-backup-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.332533 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-backup-config-data" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.357648 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.385475 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-volume-nfs-2-0"] Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.387831 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-nfs-2-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.389912 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-volume-nfs-2-config-data" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.405232 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9db8a5e9-71b5-49aa-a45d-1361d3a021c9-config-data-custom\") pod \"cinder-backup-0\" (UID: \"9db8a5e9-71b5-49aa-a45d-1361d3a021c9\") " pod="openstack/cinder-backup-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.405270 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/9db8a5e9-71b5-49aa-a45d-1361d3a021c9-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"9db8a5e9-71b5-49aa-a45d-1361d3a021c9\") " pod="openstack/cinder-backup-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.405295 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9db8a5e9-71b5-49aa-a45d-1361d3a021c9-config-data\") pod \"cinder-backup-0\" (UID: \"9db8a5e9-71b5-49aa-a45d-1361d3a021c9\") " pod="openstack/cinder-backup-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.405319 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v26h6\" (UniqueName: \"kubernetes.io/projected/9db8a5e9-71b5-49aa-a45d-1361d3a021c9-kube-api-access-v26h6\") pod \"cinder-backup-0\" (UID: \"9db8a5e9-71b5-49aa-a45d-1361d3a021c9\") " pod="openstack/cinder-backup-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.405370 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: 
\"kubernetes.io/host-path/9db8a5e9-71b5-49aa-a45d-1361d3a021c9-lib-modules\") pod \"cinder-backup-0\" (UID: \"9db8a5e9-71b5-49aa-a45d-1361d3a021c9\") " pod="openstack/cinder-backup-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.405415 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/9db8a5e9-71b5-49aa-a45d-1361d3a021c9-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"9db8a5e9-71b5-49aa-a45d-1361d3a021c9\") " pod="openstack/cinder-backup-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.405445 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/9db8a5e9-71b5-49aa-a45d-1361d3a021c9-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"9db8a5e9-71b5-49aa-a45d-1361d3a021c9\") " pod="openstack/cinder-backup-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.405470 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/9db8a5e9-71b5-49aa-a45d-1361d3a021c9-dev\") pod \"cinder-backup-0\" (UID: \"9db8a5e9-71b5-49aa-a45d-1361d3a021c9\") " pod="openstack/cinder-backup-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.405492 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9db8a5e9-71b5-49aa-a45d-1361d3a021c9-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"9db8a5e9-71b5-49aa-a45d-1361d3a021c9\") " pod="openstack/cinder-backup-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.405529 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/9db8a5e9-71b5-49aa-a45d-1361d3a021c9-etc-nvme\") pod \"cinder-backup-0\" (UID: \"9db8a5e9-71b5-49aa-a45d-1361d3a021c9\") " pod="openstack/cinder-backup-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.405631 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9db8a5e9-71b5-49aa-a45d-1361d3a021c9-scripts\") pod \"cinder-backup-0\" (UID: \"9db8a5e9-71b5-49aa-a45d-1361d3a021c9\") " pod="openstack/cinder-backup-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.418310 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/9db8a5e9-71b5-49aa-a45d-1361d3a021c9-sys\") pod \"cinder-backup-0\" (UID: \"9db8a5e9-71b5-49aa-a45d-1361d3a021c9\") " pod="openstack/cinder-backup-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.418474 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9db8a5e9-71b5-49aa-a45d-1361d3a021c9-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"9db8a5e9-71b5-49aa-a45d-1361d3a021c9\") " pod="openstack/cinder-backup-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.418638 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/9db8a5e9-71b5-49aa-a45d-1361d3a021c9-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"9db8a5e9-71b5-49aa-a45d-1361d3a021c9\") " pod="openstack/cinder-backup-0" Dec 05 
13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.418712 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/9db8a5e9-71b5-49aa-a45d-1361d3a021c9-run\") pod \"cinder-backup-0\" (UID: \"9db8a5e9-71b5-49aa-a45d-1361d3a021c9\") " pod="openstack/cinder-backup-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.420450 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-nfs-2-0"] Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.463526 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-volume-nfs-0"] Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.465342 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-nfs-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.468644 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-volume-nfs-config-data" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.485861 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-nfs-0"] Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.520386 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9db8a5e9-71b5-49aa-a45d-1361d3a021c9-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"9db8a5e9-71b5-49aa-a45d-1361d3a021c9\") " pod="openstack/cinder-backup-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.520441 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/60971449-2443-4cba-90d2-7d1c6ba8acdd-sys\") pod \"cinder-volume-nfs-2-0\" (UID: \"60971449-2443-4cba-90d2-7d1c6ba8acdd\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.520477 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/9db8a5e9-71b5-49aa-a45d-1361d3a021c9-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"9db8a5e9-71b5-49aa-a45d-1361d3a021c9\") " pod="openstack/cinder-backup-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.520505 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/9db8a5e9-71b5-49aa-a45d-1361d3a021c9-run\") pod \"cinder-backup-0\" (UID: \"9db8a5e9-71b5-49aa-a45d-1361d3a021c9\") " pod="openstack/cinder-backup-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.520531 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60971449-2443-4cba-90d2-7d1c6ba8acdd-config-data\") pod \"cinder-volume-nfs-2-0\" (UID: \"60971449-2443-4cba-90d2-7d1c6ba8acdd\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.520556 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/60971449-2443-4cba-90d2-7d1c6ba8acdd-run\") pod \"cinder-volume-nfs-2-0\" (UID: \"60971449-2443-4cba-90d2-7d1c6ba8acdd\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.520575 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/9db8a5e9-71b5-49aa-a45d-1361d3a021c9-config-data-custom\") pod \"cinder-backup-0\" (UID: \"9db8a5e9-71b5-49aa-a45d-1361d3a021c9\") " pod="openstack/cinder-backup-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.520593 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/60971449-2443-4cba-90d2-7d1c6ba8acdd-scripts\") pod \"cinder-volume-nfs-2-0\" (UID: \"60971449-2443-4cba-90d2-7d1c6ba8acdd\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.520611 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/9db8a5e9-71b5-49aa-a45d-1361d3a021c9-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"9db8a5e9-71b5-49aa-a45d-1361d3a021c9\") " pod="openstack/cinder-backup-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.520625 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/60971449-2443-4cba-90d2-7d1c6ba8acdd-etc-iscsi\") pod \"cinder-volume-nfs-2-0\" (UID: \"60971449-2443-4cba-90d2-7d1c6ba8acdd\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.520645 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/60971449-2443-4cba-90d2-7d1c6ba8acdd-var-lib-cinder\") pod \"cinder-volume-nfs-2-0\" (UID: \"60971449-2443-4cba-90d2-7d1c6ba8acdd\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.520661 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9db8a5e9-71b5-49aa-a45d-1361d3a021c9-config-data\") pod \"cinder-backup-0\" (UID: \"9db8a5e9-71b5-49aa-a45d-1361d3a021c9\") " pod="openstack/cinder-backup-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.520684 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v26h6\" (UniqueName: \"kubernetes.io/projected/9db8a5e9-71b5-49aa-a45d-1361d3a021c9-kube-api-access-v26h6\") pod \"cinder-backup-0\" (UID: \"9db8a5e9-71b5-49aa-a45d-1361d3a021c9\") " pod="openstack/cinder-backup-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.520699 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/60971449-2443-4cba-90d2-7d1c6ba8acdd-var-locks-brick\") pod \"cinder-volume-nfs-2-0\" (UID: \"60971449-2443-4cba-90d2-7d1c6ba8acdd\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.520723 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/9db8a5e9-71b5-49aa-a45d-1361d3a021c9-lib-modules\") pod \"cinder-backup-0\" (UID: \"9db8a5e9-71b5-49aa-a45d-1361d3a021c9\") " pod="openstack/cinder-backup-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.520744 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/9db8a5e9-71b5-49aa-a45d-1361d3a021c9-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"9db8a5e9-71b5-49aa-a45d-1361d3a021c9\") " pod="openstack/cinder-backup-0" Dec 05 
13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.520763 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/9db8a5e9-71b5-49aa-a45d-1361d3a021c9-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"9db8a5e9-71b5-49aa-a45d-1361d3a021c9\") " pod="openstack/cinder-backup-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.520782 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/9db8a5e9-71b5-49aa-a45d-1361d3a021c9-dev\") pod \"cinder-backup-0\" (UID: \"9db8a5e9-71b5-49aa-a45d-1361d3a021c9\") " pod="openstack/cinder-backup-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.520801 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9db8a5e9-71b5-49aa-a45d-1361d3a021c9-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"9db8a5e9-71b5-49aa-a45d-1361d3a021c9\") " pod="openstack/cinder-backup-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.520821 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/60971449-2443-4cba-90d2-7d1c6ba8acdd-config-data-custom\") pod \"cinder-volume-nfs-2-0\" (UID: \"60971449-2443-4cba-90d2-7d1c6ba8acdd\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.520840 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/60971449-2443-4cba-90d2-7d1c6ba8acdd-etc-machine-id\") pod \"cinder-volume-nfs-2-0\" (UID: \"60971449-2443-4cba-90d2-7d1c6ba8acdd\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.520865 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/60971449-2443-4cba-90d2-7d1c6ba8acdd-dev\") pod \"cinder-volume-nfs-2-0\" (UID: \"60971449-2443-4cba-90d2-7d1c6ba8acdd\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.520883 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/60971449-2443-4cba-90d2-7d1c6ba8acdd-lib-modules\") pod \"cinder-volume-nfs-2-0\" (UID: \"60971449-2443-4cba-90d2-7d1c6ba8acdd\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.520901 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/9db8a5e9-71b5-49aa-a45d-1361d3a021c9-etc-nvme\") pod \"cinder-backup-0\" (UID: \"9db8a5e9-71b5-49aa-a45d-1361d3a021c9\") " pod="openstack/cinder-backup-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.520924 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9db8a5e9-71b5-49aa-a45d-1361d3a021c9-scripts\") pod \"cinder-backup-0\" (UID: \"9db8a5e9-71b5-49aa-a45d-1361d3a021c9\") " pod="openstack/cinder-backup-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.520941 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/9db8a5e9-71b5-49aa-a45d-1361d3a021c9-sys\") pod 
\"cinder-backup-0\" (UID: \"9db8a5e9-71b5-49aa-a45d-1361d3a021c9\") " pod="openstack/cinder-backup-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.520958 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/60971449-2443-4cba-90d2-7d1c6ba8acdd-var-locks-cinder\") pod \"cinder-volume-nfs-2-0\" (UID: \"60971449-2443-4cba-90d2-7d1c6ba8acdd\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.520979 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60971449-2443-4cba-90d2-7d1c6ba8acdd-combined-ca-bundle\") pod \"cinder-volume-nfs-2-0\" (UID: \"60971449-2443-4cba-90d2-7d1c6ba8acdd\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.521000 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q4gjs\" (UniqueName: \"kubernetes.io/projected/60971449-2443-4cba-90d2-7d1c6ba8acdd-kube-api-access-q4gjs\") pod \"cinder-volume-nfs-2-0\" (UID: \"60971449-2443-4cba-90d2-7d1c6ba8acdd\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.521030 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/60971449-2443-4cba-90d2-7d1c6ba8acdd-etc-nvme\") pod \"cinder-volume-nfs-2-0\" (UID: \"60971449-2443-4cba-90d2-7d1c6ba8acdd\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.521297 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/9db8a5e9-71b5-49aa-a45d-1361d3a021c9-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"9db8a5e9-71b5-49aa-a45d-1361d3a021c9\") " pod="openstack/cinder-backup-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.521328 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/9db8a5e9-71b5-49aa-a45d-1361d3a021c9-run\") pod \"cinder-backup-0\" (UID: \"9db8a5e9-71b5-49aa-a45d-1361d3a021c9\") " pod="openstack/cinder-backup-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.521613 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/9db8a5e9-71b5-49aa-a45d-1361d3a021c9-dev\") pod \"cinder-backup-0\" (UID: \"9db8a5e9-71b5-49aa-a45d-1361d3a021c9\") " pod="openstack/cinder-backup-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.522036 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/9db8a5e9-71b5-49aa-a45d-1361d3a021c9-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"9db8a5e9-71b5-49aa-a45d-1361d3a021c9\") " pod="openstack/cinder-backup-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.522405 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/9db8a5e9-71b5-49aa-a45d-1361d3a021c9-lib-modules\") pod \"cinder-backup-0\" (UID: \"9db8a5e9-71b5-49aa-a45d-1361d3a021c9\") " pod="openstack/cinder-backup-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.522855 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9db8a5e9-71b5-49aa-a45d-1361d3a021c9-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"9db8a5e9-71b5-49aa-a45d-1361d3a021c9\") " pod="openstack/cinder-backup-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.522984 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/9db8a5e9-71b5-49aa-a45d-1361d3a021c9-sys\") pod \"cinder-backup-0\" (UID: \"9db8a5e9-71b5-49aa-a45d-1361d3a021c9\") " pod="openstack/cinder-backup-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.523097 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/9db8a5e9-71b5-49aa-a45d-1361d3a021c9-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"9db8a5e9-71b5-49aa-a45d-1361d3a021c9\") " pod="openstack/cinder-backup-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.523163 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/9db8a5e9-71b5-49aa-a45d-1361d3a021c9-etc-nvme\") pod \"cinder-backup-0\" (UID: \"9db8a5e9-71b5-49aa-a45d-1361d3a021c9\") " pod="openstack/cinder-backup-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.523383 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/9db8a5e9-71b5-49aa-a45d-1361d3a021c9-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"9db8a5e9-71b5-49aa-a45d-1361d3a021c9\") " pod="openstack/cinder-backup-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.529320 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9db8a5e9-71b5-49aa-a45d-1361d3a021c9-scripts\") pod \"cinder-backup-0\" (UID: \"9db8a5e9-71b5-49aa-a45d-1361d3a021c9\") " pod="openstack/cinder-backup-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.529905 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9db8a5e9-71b5-49aa-a45d-1361d3a021c9-config-data-custom\") pod \"cinder-backup-0\" (UID: \"9db8a5e9-71b5-49aa-a45d-1361d3a021c9\") " pod="openstack/cinder-backup-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.530079 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9db8a5e9-71b5-49aa-a45d-1361d3a021c9-config-data\") pod \"cinder-backup-0\" (UID: \"9db8a5e9-71b5-49aa-a45d-1361d3a021c9\") " pod="openstack/cinder-backup-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.540872 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9db8a5e9-71b5-49aa-a45d-1361d3a021c9-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"9db8a5e9-71b5-49aa-a45d-1361d3a021c9\") " pod="openstack/cinder-backup-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.545103 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v26h6\" (UniqueName: \"kubernetes.io/projected/9db8a5e9-71b5-49aa-a45d-1361d3a021c9-kube-api-access-v26h6\") pod \"cinder-backup-0\" (UID: \"9db8a5e9-71b5-49aa-a45d-1361d3a021c9\") " pod="openstack/cinder-backup-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.622686 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: 
\"kubernetes.io/host-path/60971449-2443-4cba-90d2-7d1c6ba8acdd-var-locks-cinder\") pod \"cinder-volume-nfs-2-0\" (UID: \"60971449-2443-4cba-90d2-7d1c6ba8acdd\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.622747 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed-etc-iscsi\") pod \"cinder-volume-nfs-0\" (UID: \"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed\") " pod="openstack/cinder-volume-nfs-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.622771 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60971449-2443-4cba-90d2-7d1c6ba8acdd-combined-ca-bundle\") pod \"cinder-volume-nfs-2-0\" (UID: \"60971449-2443-4cba-90d2-7d1c6ba8acdd\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.622806 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q4gjs\" (UniqueName: \"kubernetes.io/projected/60971449-2443-4cba-90d2-7d1c6ba8acdd-kube-api-access-q4gjs\") pod \"cinder-volume-nfs-2-0\" (UID: \"60971449-2443-4cba-90d2-7d1c6ba8acdd\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.622833 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fklnh\" (UniqueName: \"kubernetes.io/projected/3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed-kube-api-access-fklnh\") pod \"cinder-volume-nfs-0\" (UID: \"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed\") " pod="openstack/cinder-volume-nfs-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.622872 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/60971449-2443-4cba-90d2-7d1c6ba8acdd-etc-nvme\") pod \"cinder-volume-nfs-2-0\" (UID: \"60971449-2443-4cba-90d2-7d1c6ba8acdd\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.622909 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed-etc-machine-id\") pod \"cinder-volume-nfs-0\" (UID: \"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed\") " pod="openstack/cinder-volume-nfs-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.622937 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed-config-data-custom\") pod \"cinder-volume-nfs-0\" (UID: \"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed\") " pod="openstack/cinder-volume-nfs-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.622969 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/60971449-2443-4cba-90d2-7d1c6ba8acdd-sys\") pod \"cinder-volume-nfs-2-0\" (UID: \"60971449-2443-4cba-90d2-7d1c6ba8acdd\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.623001 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed-sys\") pod \"cinder-volume-nfs-0\" (UID: 
\"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed\") " pod="openstack/cinder-volume-nfs-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.623019 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/60971449-2443-4cba-90d2-7d1c6ba8acdd-etc-nvme\") pod \"cinder-volume-nfs-2-0\" (UID: \"60971449-2443-4cba-90d2-7d1c6ba8acdd\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.623052 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60971449-2443-4cba-90d2-7d1c6ba8acdd-config-data\") pod \"cinder-volume-nfs-2-0\" (UID: \"60971449-2443-4cba-90d2-7d1c6ba8acdd\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.623078 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed-etc-nvme\") pod \"cinder-volume-nfs-0\" (UID: \"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed\") " pod="openstack/cinder-volume-nfs-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.623083 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/60971449-2443-4cba-90d2-7d1c6ba8acdd-sys\") pod \"cinder-volume-nfs-2-0\" (UID: \"60971449-2443-4cba-90d2-7d1c6ba8acdd\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.623105 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed-scripts\") pod \"cinder-volume-nfs-0\" (UID: \"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed\") " pod="openstack/cinder-volume-nfs-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.623130 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed-config-data\") pod \"cinder-volume-nfs-0\" (UID: \"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed\") " pod="openstack/cinder-volume-nfs-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.623157 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed-var-locks-cinder\") pod \"cinder-volume-nfs-0\" (UID: \"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed\") " pod="openstack/cinder-volume-nfs-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.623181 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/60971449-2443-4cba-90d2-7d1c6ba8acdd-run\") pod \"cinder-volume-nfs-2-0\" (UID: \"60971449-2443-4cba-90d2-7d1c6ba8acdd\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.623227 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/60971449-2443-4cba-90d2-7d1c6ba8acdd-scripts\") pod \"cinder-volume-nfs-2-0\" (UID: \"60971449-2443-4cba-90d2-7d1c6ba8acdd\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.623252 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: 
\"kubernetes.io/host-path/60971449-2443-4cba-90d2-7d1c6ba8acdd-etc-iscsi\") pod \"cinder-volume-nfs-2-0\" (UID: \"60971449-2443-4cba-90d2-7d1c6ba8acdd\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.623281 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/60971449-2443-4cba-90d2-7d1c6ba8acdd-var-lib-cinder\") pod \"cinder-volume-nfs-2-0\" (UID: \"60971449-2443-4cba-90d2-7d1c6ba8acdd\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.623315 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/60971449-2443-4cba-90d2-7d1c6ba8acdd-var-locks-brick\") pod \"cinder-volume-nfs-2-0\" (UID: \"60971449-2443-4cba-90d2-7d1c6ba8acdd\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.623323 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/60971449-2443-4cba-90d2-7d1c6ba8acdd-run\") pod \"cinder-volume-nfs-2-0\" (UID: \"60971449-2443-4cba-90d2-7d1c6ba8acdd\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.623355 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed-var-locks-brick\") pod \"cinder-volume-nfs-0\" (UID: \"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed\") " pod="openstack/cinder-volume-nfs-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.623378 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/60971449-2443-4cba-90d2-7d1c6ba8acdd-etc-iscsi\") pod \"cinder-volume-nfs-2-0\" (UID: \"60971449-2443-4cba-90d2-7d1c6ba8acdd\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.623396 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed-var-lib-cinder\") pod \"cinder-volume-nfs-0\" (UID: \"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed\") " pod="openstack/cinder-volume-nfs-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.623430 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/60971449-2443-4cba-90d2-7d1c6ba8acdd-config-data-custom\") pod \"cinder-volume-nfs-2-0\" (UID: \"60971449-2443-4cba-90d2-7d1c6ba8acdd\") " pod="openstack/cinder-volume-nfs-2-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.623453 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed-run\") pod \"cinder-volume-nfs-0\" (UID: \"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed\") " pod="openstack/cinder-volume-nfs-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.623480 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/60971449-2443-4cba-90d2-7d1c6ba8acdd-etc-machine-id\") pod \"cinder-volume-nfs-2-0\" (UID: \"60971449-2443-4cba-90d2-7d1c6ba8acdd\") " pod="openstack/cinder-volume-nfs-2-0" 
Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.623505 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed-combined-ca-bundle\") pod \"cinder-volume-nfs-0\" (UID: \"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed\") " pod="openstack/cinder-volume-nfs-0"
Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.623540 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/60971449-2443-4cba-90d2-7d1c6ba8acdd-dev\") pod \"cinder-volume-nfs-2-0\" (UID: \"60971449-2443-4cba-90d2-7d1c6ba8acdd\") " pod="openstack/cinder-volume-nfs-2-0"
Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.623559 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed-dev\") pod \"cinder-volume-nfs-0\" (UID: \"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed\") " pod="openstack/cinder-volume-nfs-0"
Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.623600 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed-lib-modules\") pod \"cinder-volume-nfs-0\" (UID: \"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed\") " pod="openstack/cinder-volume-nfs-0"
Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.623642 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/60971449-2443-4cba-90d2-7d1c6ba8acdd-lib-modules\") pod \"cinder-volume-nfs-2-0\" (UID: \"60971449-2443-4cba-90d2-7d1c6ba8acdd\") " pod="openstack/cinder-volume-nfs-2-0"
Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.623733 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/60971449-2443-4cba-90d2-7d1c6ba8acdd-lib-modules\") pod \"cinder-volume-nfs-2-0\" (UID: \"60971449-2443-4cba-90d2-7d1c6ba8acdd\") " pod="openstack/cinder-volume-nfs-2-0"
Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.623790 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/60971449-2443-4cba-90d2-7d1c6ba8acdd-var-lib-cinder\") pod \"cinder-volume-nfs-2-0\" (UID: \"60971449-2443-4cba-90d2-7d1c6ba8acdd\") " pod="openstack/cinder-volume-nfs-2-0"
Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.623814 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/60971449-2443-4cba-90d2-7d1c6ba8acdd-var-locks-cinder\") pod \"cinder-volume-nfs-2-0\" (UID: \"60971449-2443-4cba-90d2-7d1c6ba8acdd\") " pod="openstack/cinder-volume-nfs-2-0"
Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.623840 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/60971449-2443-4cba-90d2-7d1c6ba8acdd-var-locks-brick\") pod \"cinder-volume-nfs-2-0\" (UID: \"60971449-2443-4cba-90d2-7d1c6ba8acdd\") " pod="openstack/cinder-volume-nfs-2-0"
Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.623863 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/60971449-2443-4cba-90d2-7d1c6ba8acdd-etc-machine-id\") pod \"cinder-volume-nfs-2-0\" (UID: \"60971449-2443-4cba-90d2-7d1c6ba8acdd\") " pod="openstack/cinder-volume-nfs-2-0"
Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.623884 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/60971449-2443-4cba-90d2-7d1c6ba8acdd-dev\") pod \"cinder-volume-nfs-2-0\" (UID: \"60971449-2443-4cba-90d2-7d1c6ba8acdd\") " pod="openstack/cinder-volume-nfs-2-0"
Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.627722 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60971449-2443-4cba-90d2-7d1c6ba8acdd-config-data\") pod \"cinder-volume-nfs-2-0\" (UID: \"60971449-2443-4cba-90d2-7d1c6ba8acdd\") " pod="openstack/cinder-volume-nfs-2-0"
Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.627874 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60971449-2443-4cba-90d2-7d1c6ba8acdd-combined-ca-bundle\") pod \"cinder-volume-nfs-2-0\" (UID: \"60971449-2443-4cba-90d2-7d1c6ba8acdd\") " pod="openstack/cinder-volume-nfs-2-0"
Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.628511 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/60971449-2443-4cba-90d2-7d1c6ba8acdd-config-data-custom\") pod \"cinder-volume-nfs-2-0\" (UID: \"60971449-2443-4cba-90d2-7d1c6ba8acdd\") " pod="openstack/cinder-volume-nfs-2-0"
Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.629856 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/60971449-2443-4cba-90d2-7d1c6ba8acdd-scripts\") pod \"cinder-volume-nfs-2-0\" (UID: \"60971449-2443-4cba-90d2-7d1c6ba8acdd\") " pod="openstack/cinder-volume-nfs-2-0"
Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.642867 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q4gjs\" (UniqueName: \"kubernetes.io/projected/60971449-2443-4cba-90d2-7d1c6ba8acdd-kube-api-access-q4gjs\") pod \"cinder-volume-nfs-2-0\" (UID: \"60971449-2443-4cba-90d2-7d1c6ba8acdd\") " pod="openstack/cinder-volume-nfs-2-0"
Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.644797 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-backup-0"
Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.702884 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-nfs-2-0"
Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.725505 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed-etc-iscsi\") pod \"cinder-volume-nfs-0\" (UID: \"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed\") " pod="openstack/cinder-volume-nfs-0"
Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.725567 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fklnh\" (UniqueName: \"kubernetes.io/projected/3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed-kube-api-access-fklnh\") pod \"cinder-volume-nfs-0\" (UID: \"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed\") " pod="openstack/cinder-volume-nfs-0"
Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.725610 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed-etc-machine-id\") pod \"cinder-volume-nfs-0\" (UID: \"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed\") " pod="openstack/cinder-volume-nfs-0"
Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.725628 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed-config-data-custom\") pod \"cinder-volume-nfs-0\" (UID: \"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed\") " pod="openstack/cinder-volume-nfs-0"
Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.725656 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed-sys\") pod \"cinder-volume-nfs-0\" (UID: \"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed\") " pod="openstack/cinder-volume-nfs-0"
Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.725692 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed-etc-nvme\") pod \"cinder-volume-nfs-0\" (UID: \"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed\") " pod="openstack/cinder-volume-nfs-0"
Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.725708 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed-scripts\") pod \"cinder-volume-nfs-0\" (UID: \"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed\") " pod="openstack/cinder-volume-nfs-0"
Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.725723 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed-config-data\") pod \"cinder-volume-nfs-0\" (UID: \"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed\") " pod="openstack/cinder-volume-nfs-0"
Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.725743 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed-var-locks-cinder\") pod \"cinder-volume-nfs-0\" (UID: \"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed\") " pod="openstack/cinder-volume-nfs-0"
Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.725793 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed-var-locks-brick\") pod \"cinder-volume-nfs-0\" (UID: \"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed\") " pod="openstack/cinder-volume-nfs-0"
Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.725821 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed-var-lib-cinder\") pod \"cinder-volume-nfs-0\" (UID: \"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed\") " pod="openstack/cinder-volume-nfs-0"
Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.725844 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed-run\") pod \"cinder-volume-nfs-0\" (UID: \"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed\") " pod="openstack/cinder-volume-nfs-0"
Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.725862 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed-combined-ca-bundle\") pod \"cinder-volume-nfs-0\" (UID: \"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed\") " pod="openstack/cinder-volume-nfs-0"
Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.725900 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed-dev\") pod \"cinder-volume-nfs-0\" (UID: \"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed\") " pod="openstack/cinder-volume-nfs-0"
Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.725915 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed-lib-modules\") pod \"cinder-volume-nfs-0\" (UID: \"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed\") " pod="openstack/cinder-volume-nfs-0"
Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.726004 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed-lib-modules\") pod \"cinder-volume-nfs-0\" (UID: \"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed\") " pod="openstack/cinder-volume-nfs-0"
Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.726037 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed-etc-iscsi\") pod \"cinder-volume-nfs-0\" (UID: \"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed\") " pod="openstack/cinder-volume-nfs-0"
Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.726739 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed-var-locks-cinder\") pod \"cinder-volume-nfs-0\" (UID: \"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed\") " pod="openstack/cinder-volume-nfs-0"
Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.726775 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed-var-locks-brick\") pod \"cinder-volume-nfs-0\" (UID: \"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed\") " pod="openstack/cinder-volume-nfs-0"
Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.726802 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed-var-lib-cinder\") pod \"cinder-volume-nfs-0\" (UID: \"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed\") " pod="openstack/cinder-volume-nfs-0"
"MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed-var-lib-cinder\") pod \"cinder-volume-nfs-0\" (UID: \"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed\") " pod="openstack/cinder-volume-nfs-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.726823 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed-run\") pod \"cinder-volume-nfs-0\" (UID: \"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed\") " pod="openstack/cinder-volume-nfs-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.727546 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed-sys\") pod \"cinder-volume-nfs-0\" (UID: \"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed\") " pod="openstack/cinder-volume-nfs-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.727628 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed-etc-nvme\") pod \"cinder-volume-nfs-0\" (UID: \"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed\") " pod="openstack/cinder-volume-nfs-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.727665 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed-etc-machine-id\") pod \"cinder-volume-nfs-0\" (UID: \"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed\") " pod="openstack/cinder-volume-nfs-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.728348 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed-dev\") pod \"cinder-volume-nfs-0\" (UID: \"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed\") " pod="openstack/cinder-volume-nfs-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.734653 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed-combined-ca-bundle\") pod \"cinder-volume-nfs-0\" (UID: \"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed\") " pod="openstack/cinder-volume-nfs-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.734703 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed-config-data-custom\") pod \"cinder-volume-nfs-0\" (UID: \"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed\") " pod="openstack/cinder-volume-nfs-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.737522 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed-config-data\") pod \"cinder-volume-nfs-0\" (UID: \"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed\") " pod="openstack/cinder-volume-nfs-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.745161 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed-scripts\") pod \"cinder-volume-nfs-0\" (UID: \"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed\") " pod="openstack/cinder-volume-nfs-0" Dec 05 13:14:44 crc kubenswrapper[4784]: I1205 13:14:44.792635 4784 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-fklnh\" (UniqueName: \"kubernetes.io/projected/3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed-kube-api-access-fklnh\") pod \"cinder-volume-nfs-0\" (UID: \"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed\") " pod="openstack/cinder-volume-nfs-0" Dec 05 13:14:45 crc kubenswrapper[4784]: I1205 13:14:45.082984 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-nfs-0" Dec 05 13:14:45 crc kubenswrapper[4784]: I1205 13:14:45.346393 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Dec 05 13:14:45 crc kubenswrapper[4784]: I1205 13:14:45.431557 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-nfs-2-0"] Dec 05 13:14:45 crc kubenswrapper[4784]: W1205 13:14:45.781817 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3d4dfc19_7b89_4ef4_9ead_18c78bbf69ed.slice/crio-21a4d2cf03f8ae7a2e419ac438fe6af6670c73816fc2ca9c1b45352f2bdf1dab WatchSource:0}: Error finding container 21a4d2cf03f8ae7a2e419ac438fe6af6670c73816fc2ca9c1b45352f2bdf1dab: Status 404 returned error can't find the container with id 21a4d2cf03f8ae7a2e419ac438fe6af6670c73816fc2ca9c1b45352f2bdf1dab Dec 05 13:14:45 crc kubenswrapper[4784]: I1205 13:14:45.783467 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-nfs-0"] Dec 05 13:14:45 crc kubenswrapper[4784]: I1205 13:14:45.981990 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-nfs-2-0" event={"ID":"60971449-2443-4cba-90d2-7d1c6ba8acdd","Type":"ContainerStarted","Data":"22b7c5091bc9b9424e7a15dc1c6b0c49dda2f3f4719f935d6958548c117c1e1a"} Dec 05 13:14:45 crc kubenswrapper[4784]: I1205 13:14:45.983773 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"9db8a5e9-71b5-49aa-a45d-1361d3a021c9","Type":"ContainerStarted","Data":"5c364fc1d0d608c67fb3384cc15222a6b22e7f382568637e7afdc0e12c9f95ef"} Dec 05 13:14:45 crc kubenswrapper[4784]: I1205 13:14:45.984772 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-nfs-0" event={"ID":"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed","Type":"ContainerStarted","Data":"21a4d2cf03f8ae7a2e419ac438fe6af6670c73816fc2ca9c1b45352f2bdf1dab"} Dec 05 13:14:46 crc kubenswrapper[4784]: I1205 13:14:46.995096 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-nfs-2-0" event={"ID":"60971449-2443-4cba-90d2-7d1c6ba8acdd","Type":"ContainerStarted","Data":"44ad57113f17d4d509bd4ec2218cecdf1689dab3b681fac67b5893145e4e97c5"} Dec 05 13:14:46 crc kubenswrapper[4784]: I1205 13:14:46.995724 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-nfs-2-0" event={"ID":"60971449-2443-4cba-90d2-7d1c6ba8acdd","Type":"ContainerStarted","Data":"19ad2d84b4936565165be0b75271f3263ea09f6bb5ee1afabf63094d2e893371"} Dec 05 13:14:46 crc kubenswrapper[4784]: I1205 13:14:46.997042 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"9db8a5e9-71b5-49aa-a45d-1361d3a021c9","Type":"ContainerStarted","Data":"4f53d36e04724b36c76815a3c46690d8e1cd0a2c6a668e71442c1e7f03743fea"} Dec 05 13:14:46 crc kubenswrapper[4784]: I1205 13:14:46.997088 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" 
event={"ID":"9db8a5e9-71b5-49aa-a45d-1361d3a021c9","Type":"ContainerStarted","Data":"b696a60d18cb5a9489b7c28c0e0b00db696e8a5a83f2def9d87ddae9cbf21857"} Dec 05 13:14:47 crc kubenswrapper[4784]: I1205 13:14:47.010807 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-nfs-0" event={"ID":"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed","Type":"ContainerStarted","Data":"5daa10a542d405ade275696266865fa35d07937b41f58f04a41a00678eca2bb1"} Dec 05 13:14:47 crc kubenswrapper[4784]: I1205 13:14:47.010846 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-nfs-0" event={"ID":"3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed","Type":"ContainerStarted","Data":"ab2d3d04c45ba2c6a1799828201878dd408f9847abf8586a437e6da8741d8752"} Dec 05 13:14:47 crc kubenswrapper[4784]: I1205 13:14:47.067891 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-volume-nfs-2-0" podStartSLOduration=2.719046766 podStartE2EDuration="3.067870522s" podCreationTimestamp="2025-12-05 13:14:44 +0000 UTC" firstStartedPulling="2025-12-05 13:14:45.435146714 +0000 UTC m=+2964.855213529" lastFinishedPulling="2025-12-05 13:14:45.78397046 +0000 UTC m=+2965.204037285" observedRunningTime="2025-12-05 13:14:47.023307992 +0000 UTC m=+2966.443374807" watchObservedRunningTime="2025-12-05 13:14:47.067870522 +0000 UTC m=+2966.487937327" Dec 05 13:14:47 crc kubenswrapper[4784]: I1205 13:14:47.071993 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-volume-nfs-0" podStartSLOduration=3.071974251 podStartE2EDuration="3.071974251s" podCreationTimestamp="2025-12-05 13:14:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 13:14:47.057908402 +0000 UTC m=+2966.477975237" watchObservedRunningTime="2025-12-05 13:14:47.071974251 +0000 UTC m=+2966.492041066" Dec 05 13:14:47 crc kubenswrapper[4784]: I1205 13:14:47.099139 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-backup-0" podStartSLOduration=2.73991759 podStartE2EDuration="3.099121508s" podCreationTimestamp="2025-12-05 13:14:44 +0000 UTC" firstStartedPulling="2025-12-05 13:14:45.356311945 +0000 UTC m=+2964.776378760" lastFinishedPulling="2025-12-05 13:14:45.715515863 +0000 UTC m=+2965.135582678" observedRunningTime="2025-12-05 13:14:47.087443683 +0000 UTC m=+2966.507510518" watchObservedRunningTime="2025-12-05 13:14:47.099121508 +0000 UTC m=+2966.519188323" Dec 05 13:14:49 crc kubenswrapper[4784]: I1205 13:14:49.000082 4784 scope.go:117] "RemoveContainer" containerID="f945d817e6fa3117971bcb82d086dfe6a7c65c33ba45f235aec8fd289f80bd9f" Dec 05 13:14:49 crc kubenswrapper[4784]: E1205 13:14:49.001014 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:14:49 crc kubenswrapper[4784]: I1205 13:14:49.645617 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-backup-0" Dec 05 13:14:49 crc kubenswrapper[4784]: I1205 13:14:49.704138 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openstack/cinder-volume-nfs-2-0" Dec 05 13:14:50 crc kubenswrapper[4784]: I1205 13:14:50.083630 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-volume-nfs-0" Dec 05 13:14:54 crc kubenswrapper[4784]: I1205 13:14:54.856331 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-backup-0" Dec 05 13:14:55 crc kubenswrapper[4784]: I1205 13:14:55.025504 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-volume-nfs-2-0" Dec 05 13:14:55 crc kubenswrapper[4784]: I1205 13:14:55.381647 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-volume-nfs-0" Dec 05 13:15:00 crc kubenswrapper[4784]: I1205 13:15:00.137487 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415675-5br52"] Dec 05 13:15:00 crc kubenswrapper[4784]: I1205 13:15:00.139910 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415675-5br52" Dec 05 13:15:00 crc kubenswrapper[4784]: I1205 13:15:00.142611 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 13:15:00 crc kubenswrapper[4784]: I1205 13:15:00.144075 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 13:15:00 crc kubenswrapper[4784]: I1205 13:15:00.151146 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415675-5br52"] Dec 05 13:15:00 crc kubenswrapper[4784]: I1205 13:15:00.211814 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d45wh\" (UniqueName: \"kubernetes.io/projected/7e39ba41-37a6-47e9-8118-f67c0726721a-kube-api-access-d45wh\") pod \"collect-profiles-29415675-5br52\" (UID: \"7e39ba41-37a6-47e9-8118-f67c0726721a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415675-5br52" Dec 05 13:15:00 crc kubenswrapper[4784]: I1205 13:15:00.212533 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7e39ba41-37a6-47e9-8118-f67c0726721a-config-volume\") pod \"collect-profiles-29415675-5br52\" (UID: \"7e39ba41-37a6-47e9-8118-f67c0726721a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415675-5br52" Dec 05 13:15:00 crc kubenswrapper[4784]: I1205 13:15:00.212834 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7e39ba41-37a6-47e9-8118-f67c0726721a-secret-volume\") pod \"collect-profiles-29415675-5br52\" (UID: \"7e39ba41-37a6-47e9-8118-f67c0726721a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415675-5br52" Dec 05 13:15:00 crc kubenswrapper[4784]: I1205 13:15:00.315649 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7e39ba41-37a6-47e9-8118-f67c0726721a-secret-volume\") pod \"collect-profiles-29415675-5br52\" (UID: \"7e39ba41-37a6-47e9-8118-f67c0726721a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415675-5br52" Dec 05 13:15:00 crc kubenswrapper[4784]: I1205 13:15:00.315767 4784 
Dec 05 13:15:00 crc kubenswrapper[4784]: I1205 13:15:00.315786 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7e39ba41-37a6-47e9-8118-f67c0726721a-config-volume\") pod \"collect-profiles-29415675-5br52\" (UID: \"7e39ba41-37a6-47e9-8118-f67c0726721a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415675-5br52"
Dec 05 13:15:00 crc kubenswrapper[4784]: I1205 13:15:00.317783 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7e39ba41-37a6-47e9-8118-f67c0726721a-config-volume\") pod \"collect-profiles-29415675-5br52\" (UID: \"7e39ba41-37a6-47e9-8118-f67c0726721a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415675-5br52"
Dec 05 13:15:00 crc kubenswrapper[4784]: I1205 13:15:00.331980 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7e39ba41-37a6-47e9-8118-f67c0726721a-secret-volume\") pod \"collect-profiles-29415675-5br52\" (UID: \"7e39ba41-37a6-47e9-8118-f67c0726721a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415675-5br52"
Dec 05 13:15:00 crc kubenswrapper[4784]: I1205 13:15:00.332627 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d45wh\" (UniqueName: \"kubernetes.io/projected/7e39ba41-37a6-47e9-8118-f67c0726721a-kube-api-access-d45wh\") pod \"collect-profiles-29415675-5br52\" (UID: \"7e39ba41-37a6-47e9-8118-f67c0726721a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415675-5br52"
Dec 05 13:15:00 crc kubenswrapper[4784]: I1205 13:15:00.485351 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415675-5br52"
Dec 05 13:15:00 crc kubenswrapper[4784]: I1205 13:15:00.988009 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415675-5br52"]
Dec 05 13:15:01 crc kubenswrapper[4784]: I1205 13:15:01.209992 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415675-5br52" event={"ID":"7e39ba41-37a6-47e9-8118-f67c0726721a","Type":"ContainerStarted","Data":"fd6e7cb1b58a61b3b46f83f259fd4bcde1992225d511ced23e89860170adfc48"}
Dec 05 13:15:01 crc kubenswrapper[4784]: I1205 13:15:01.210461 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415675-5br52" event={"ID":"7e39ba41-37a6-47e9-8118-f67c0726721a","Type":"ContainerStarted","Data":"a6cdef5615e37e10bc929518998910dc0b9eeffd5e6b39bddfc4aa8bf62af345"}
Dec 05 13:15:01 crc kubenswrapper[4784]: I1205 13:15:01.237102 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29415675-5br52" podStartSLOduration=1.237077764 podStartE2EDuration="1.237077764s" podCreationTimestamp="2025-12-05 13:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 13:15:01.229159977 +0000 UTC m=+2980.649226802" watchObservedRunningTime="2025-12-05 13:15:01.237077764 +0000 UTC m=+2980.657144579"
Dec 05 13:15:01 crc kubenswrapper[4784]: E1205 13:15:01.630423 4784 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7e39ba41_37a6_47e9_8118_f67c0726721a.slice/crio-fd6e7cb1b58a61b3b46f83f259fd4bcde1992225d511ced23e89860170adfc48.scope\": RecentStats: unable to find data in memory cache]"
Dec 05 13:15:03 crc kubenswrapper[4784]: I1205 13:15:03.231668 4784 generic.go:334] "Generic (PLEG): container finished" podID="7e39ba41-37a6-47e9-8118-f67c0726721a" containerID="fd6e7cb1b58a61b3b46f83f259fd4bcde1992225d511ced23e89860170adfc48" exitCode=0
Dec 05 13:15:03 crc kubenswrapper[4784]: I1205 13:15:03.231840 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415675-5br52" event={"ID":"7e39ba41-37a6-47e9-8118-f67c0726721a","Type":"ContainerDied","Data":"fd6e7cb1b58a61b3b46f83f259fd4bcde1992225d511ced23e89860170adfc48"}
Dec 05 13:15:03 crc kubenswrapper[4784]: I1205 13:15:03.998879 4784 scope.go:117] "RemoveContainer" containerID="f945d817e6fa3117971bcb82d086dfe6a7c65c33ba45f235aec8fd289f80bd9f"
Dec 05 13:15:03 crc kubenswrapper[4784]: E1205 13:15:03.999573 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 13:15:04 crc kubenswrapper[4784]: I1205 13:15:04.618141 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415675-5br52"
Dec 05 13:15:04 crc kubenswrapper[4784]: I1205 13:15:04.746423 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d45wh\" (UniqueName: \"kubernetes.io/projected/7e39ba41-37a6-47e9-8118-f67c0726721a-kube-api-access-d45wh\") pod \"7e39ba41-37a6-47e9-8118-f67c0726721a\" (UID: \"7e39ba41-37a6-47e9-8118-f67c0726721a\") "
Dec 05 13:15:04 crc kubenswrapper[4784]: I1205 13:15:04.746615 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7e39ba41-37a6-47e9-8118-f67c0726721a-config-volume\") pod \"7e39ba41-37a6-47e9-8118-f67c0726721a\" (UID: \"7e39ba41-37a6-47e9-8118-f67c0726721a\") "
Dec 05 13:15:04 crc kubenswrapper[4784]: I1205 13:15:04.746657 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7e39ba41-37a6-47e9-8118-f67c0726721a-secret-volume\") pod \"7e39ba41-37a6-47e9-8118-f67c0726721a\" (UID: \"7e39ba41-37a6-47e9-8118-f67c0726721a\") "
Dec 05 13:15:04 crc kubenswrapper[4784]: I1205 13:15:04.747723 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7e39ba41-37a6-47e9-8118-f67c0726721a-config-volume" (OuterVolumeSpecName: "config-volume") pod "7e39ba41-37a6-47e9-8118-f67c0726721a" (UID: "7e39ba41-37a6-47e9-8118-f67c0726721a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 13:15:04 crc kubenswrapper[4784]: I1205 13:15:04.752428 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e39ba41-37a6-47e9-8118-f67c0726721a-kube-api-access-d45wh" (OuterVolumeSpecName: "kube-api-access-d45wh") pod "7e39ba41-37a6-47e9-8118-f67c0726721a" (UID: "7e39ba41-37a6-47e9-8118-f67c0726721a"). InnerVolumeSpecName "kube-api-access-d45wh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 13:15:04 crc kubenswrapper[4784]: I1205 13:15:04.753366 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e39ba41-37a6-47e9-8118-f67c0726721a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "7e39ba41-37a6-47e9-8118-f67c0726721a" (UID: "7e39ba41-37a6-47e9-8118-f67c0726721a"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 13:15:04 crc kubenswrapper[4784]: I1205 13:15:04.849018 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d45wh\" (UniqueName: \"kubernetes.io/projected/7e39ba41-37a6-47e9-8118-f67c0726721a-kube-api-access-d45wh\") on node \"crc\" DevicePath \"\""
Dec 05 13:15:04 crc kubenswrapper[4784]: I1205 13:15:04.849055 4784 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7e39ba41-37a6-47e9-8118-f67c0726721a-config-volume\") on node \"crc\" DevicePath \"\""
Dec 05 13:15:04 crc kubenswrapper[4784]: I1205 13:15:04.849063 4784 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7e39ba41-37a6-47e9-8118-f67c0726721a-secret-volume\") on node \"crc\" DevicePath \"\""
Dec 05 13:15:05 crc kubenswrapper[4784]: I1205 13:15:05.252955 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415675-5br52" event={"ID":"7e39ba41-37a6-47e9-8118-f67c0726721a","Type":"ContainerDied","Data":"a6cdef5615e37e10bc929518998910dc0b9eeffd5e6b39bddfc4aa8bf62af345"}
Dec 05 13:15:05 crc kubenswrapper[4784]: I1205 13:15:05.253312 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a6cdef5615e37e10bc929518998910dc0b9eeffd5e6b39bddfc4aa8bf62af345"
Dec 05 13:15:05 crc kubenswrapper[4784]: I1205 13:15:05.253082 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415675-5br52"
Dec 05 13:15:05 crc kubenswrapper[4784]: I1205 13:15:05.320477 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415630-zf2zj"]
Dec 05 13:15:05 crc kubenswrapper[4784]: I1205 13:15:05.331670 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415630-zf2zj"]
Dec 05 13:15:07 crc kubenswrapper[4784]: I1205 13:15:07.017736 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32012ed7-29ed-48ce-a59d-4d39e62e1672" path="/var/lib/kubelet/pods/32012ed7-29ed-48ce-a59d-4d39e62e1672/volumes"
Dec 05 13:15:18 crc kubenswrapper[4784]: I1205 13:15:18.999360 4784 scope.go:117] "RemoveContainer" containerID="f945d817e6fa3117971bcb82d086dfe6a7c65c33ba45f235aec8fd289f80bd9f"
Dec 05 13:15:19 crc kubenswrapper[4784]: E1205 13:15:19.001569 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 13:15:30 crc kubenswrapper[4784]: I1205 13:15:29.999714 4784 scope.go:117] "RemoveContainer" containerID="f945d817e6fa3117971bcb82d086dfe6a7c65c33ba45f235aec8fd289f80bd9f"
Dec 05 13:15:30 crc kubenswrapper[4784]: E1205 13:15:30.000979 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:15:39 crc kubenswrapper[4784]: I1205 13:15:39.411569 4784 scope.go:117] "RemoveContainer" containerID="4d83f32dbe9e80cd2ae4ea65346b4894aa24b284d49351b941ae000059d79f57" Dec 05 13:15:45 crc kubenswrapper[4784]: I1205 13:15:45.000519 4784 scope.go:117] "RemoveContainer" containerID="f945d817e6fa3117971bcb82d086dfe6a7c65c33ba45f235aec8fd289f80bd9f" Dec 05 13:15:45 crc kubenswrapper[4784]: E1205 13:15:45.001395 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:15:48 crc kubenswrapper[4784]: I1205 13:15:48.455986 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 05 13:15:48 crc kubenswrapper[4784]: I1205 13:15:48.456893 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="1bc3d9db-9849-427a-a9a7-ada6e23ca9e3" containerName="prometheus" containerID="cri-o://84c4c708e0559a93daba69693026e01f320821e16ca9f28d450be03dd5ab621c" gracePeriod=600 Dec 05 13:15:48 crc kubenswrapper[4784]: I1205 13:15:48.457008 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="1bc3d9db-9849-427a-a9a7-ada6e23ca9e3" containerName="thanos-sidecar" containerID="cri-o://80c22f328da394495de003fd6791ae5f3a9eb8306e42226a91e45feb1d2520b0" gracePeriod=600 Dec 05 13:15:48 crc kubenswrapper[4784]: I1205 13:15:48.457062 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="1bc3d9db-9849-427a-a9a7-ada6e23ca9e3" containerName="config-reloader" containerID="cri-o://6313dbf91685fedd972b24d23634cc938cd231c609b0b6ef46b14b015416a5bb" gracePeriod=600 Dec 05 13:15:48 crc kubenswrapper[4784]: I1205 13:15:48.750352 4784 generic.go:334] "Generic (PLEG): container finished" podID="1bc3d9db-9849-427a-a9a7-ada6e23ca9e3" containerID="80c22f328da394495de003fd6791ae5f3a9eb8306e42226a91e45feb1d2520b0" exitCode=0 Dec 05 13:15:48 crc kubenswrapper[4784]: I1205 13:15:48.750689 4784 generic.go:334] "Generic (PLEG): container finished" podID="1bc3d9db-9849-427a-a9a7-ada6e23ca9e3" containerID="84c4c708e0559a93daba69693026e01f320821e16ca9f28d450be03dd5ab621c" exitCode=0 Dec 05 13:15:48 crc kubenswrapper[4784]: I1205 13:15:48.750714 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3","Type":"ContainerDied","Data":"80c22f328da394495de003fd6791ae5f3a9eb8306e42226a91e45feb1d2520b0"} Dec 05 13:15:48 crc kubenswrapper[4784]: I1205 13:15:48.750743 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3","Type":"ContainerDied","Data":"84c4c708e0559a93daba69693026e01f320821e16ca9f28d450be03dd5ab621c"} Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.468625 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.613226 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\" (UID: \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\") " Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.613316 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\" (UID: \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\") " Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.613412 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-web-config\") pod \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\" (UID: \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\") " Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.613493 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-secret-combined-ca-bundle\") pod \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\" (UID: \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\") " Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.613560 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dnczw\" (UniqueName: \"kubernetes.io/projected/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-kube-api-access-dnczw\") pod \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\" (UID: \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\") " Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.613595 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-config\") pod \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\" (UID: \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\") " Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.613652 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-thanos-prometheus-http-client-file\") pod \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\" (UID: \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\") " Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.613685 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-prometheus-metric-storage-rulefiles-0\") pod \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\" (UID: \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\") " Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.613725 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-tls-assets\") pod \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\" (UID: \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\") " Dec 05 13:15:49 crc 
kubenswrapper[4784]: I1205 13:15:49.613769 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-config-out\") pod \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\" (UID: \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\") "
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.613891 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-04a1f8eb-a1ee-49a8-9ddb-41c4006574fd\") pod \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\" (UID: \"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3\") "
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.614488 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-prometheus-metric-storage-rulefiles-0" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-0") pod "1bc3d9db-9849-427a-a9a7-ada6e23ca9e3" (UID: "1bc3d9db-9849-427a-a9a7-ada6e23ca9e3"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.614822 4784 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-prometheus-metric-storage-rulefiles-0\") on node \"crc\" DevicePath \"\""
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.620460 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-kube-api-access-dnczw" (OuterVolumeSpecName: "kube-api-access-dnczw") pod "1bc3d9db-9849-427a-a9a7-ada6e23ca9e3" (UID: "1bc3d9db-9849-427a-a9a7-ada6e23ca9e3"). InnerVolumeSpecName "kube-api-access-dnczw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.620848 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-thanos-prometheus-http-client-file" (OuterVolumeSpecName: "thanos-prometheus-http-client-file") pod "1bc3d9db-9849-427a-a9a7-ada6e23ca9e3" (UID: "1bc3d9db-9849-427a-a9a7-ada6e23ca9e3"). InnerVolumeSpecName "thanos-prometheus-http-client-file". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.620856 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-config-out" (OuterVolumeSpecName: "config-out") pod "1bc3d9db-9849-427a-a9a7-ada6e23ca9e3" (UID: "1bc3d9db-9849-427a-a9a7-ada6e23ca9e3"). InnerVolumeSpecName "config-out". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.620914 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-tls-assets" (OuterVolumeSpecName: "tls-assets") pod "1bc3d9db-9849-427a-a9a7-ada6e23ca9e3" (UID: "1bc3d9db-9849-427a-a9a7-ada6e23ca9e3"). InnerVolumeSpecName "tls-assets". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.620933 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-config" (OuterVolumeSpecName: "config") pod "1bc3d9db-9849-427a-a9a7-ada6e23ca9e3" (UID: "1bc3d9db-9849-427a-a9a7-ada6e23ca9e3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.621355 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d" (OuterVolumeSpecName: "web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d") pod "1bc3d9db-9849-427a-a9a7-ada6e23ca9e3" (UID: "1bc3d9db-9849-427a-a9a7-ada6e23ca9e3"). InnerVolumeSpecName "web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.622130 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-secret-combined-ca-bundle" (OuterVolumeSpecName: "secret-combined-ca-bundle") pod "1bc3d9db-9849-427a-a9a7-ada6e23ca9e3" (UID: "1bc3d9db-9849-427a-a9a7-ada6e23ca9e3"). InnerVolumeSpecName "secret-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.630753 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d" (OuterVolumeSpecName: "web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d") pod "1bc3d9db-9849-427a-a9a7-ada6e23ca9e3" (UID: "1bc3d9db-9849-427a-a9a7-ada6e23ca9e3"). InnerVolumeSpecName "web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.655838 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-04a1f8eb-a1ee-49a8-9ddb-41c4006574fd" (OuterVolumeSpecName: "prometheus-metric-storage-db") pod "1bc3d9db-9849-427a-a9a7-ada6e23ca9e3" (UID: "1bc3d9db-9849-427a-a9a7-ada6e23ca9e3"). InnerVolumeSpecName "pvc-04a1f8eb-a1ee-49a8-9ddb-41c4006574fd". PluginName "kubernetes.io/csi", VolumeGidValue ""
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.697347 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-web-config" (OuterVolumeSpecName: "web-config") pod "1bc3d9db-9849-427a-a9a7-ada6e23ca9e3" (UID: "1bc3d9db-9849-427a-a9a7-ada6e23ca9e3"). InnerVolumeSpecName "web-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.717056 4784 reconciler_common.go:293] "Volume detached for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-config-out\") on node \"crc\" DevicePath \"\""
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.717116 4784 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-04a1f8eb-a1ee-49a8-9ddb-41c4006574fd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-04a1f8eb-a1ee-49a8-9ddb-41c4006574fd\") on node \"crc\" "
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.717131 4784 reconciler_common.go:293] "Volume detached for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") on node \"crc\" DevicePath \"\""
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.717142 4784 reconciler_common.go:293] "Volume detached for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") on node \"crc\" DevicePath \"\""
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.717153 4784 reconciler_common.go:293] "Volume detached for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-web-config\") on node \"crc\" DevicePath \"\""
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.717163 4784 reconciler_common.go:293] "Volume detached for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-secret-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.717171 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dnczw\" (UniqueName: \"kubernetes.io/projected/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-kube-api-access-dnczw\") on node \"crc\" DevicePath \"\""
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.717179 4784 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-config\") on node \"crc\" DevicePath \"\""
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.717200 4784 reconciler_common.go:293] "Volume detached for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-thanos-prometheus-http-client-file\") on node \"crc\" DevicePath \"\""
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.717209 4784 reconciler_common.go:293] "Volume detached for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3-tls-assets\") on node \"crc\" DevicePath \"\""
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.751870 4784 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice...
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.752001 4784 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-04a1f8eb-a1ee-49a8-9ddb-41c4006574fd" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-04a1f8eb-a1ee-49a8-9ddb-41c4006574fd") on node "crc"
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.760322 4784 generic.go:334] "Generic (PLEG): container finished" podID="1bc3d9db-9849-427a-a9a7-ada6e23ca9e3" containerID="6313dbf91685fedd972b24d23634cc938cd231c609b0b6ef46b14b015416a5bb" exitCode=0
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.760359 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3","Type":"ContainerDied","Data":"6313dbf91685fedd972b24d23634cc938cd231c609b0b6ef46b14b015416a5bb"}
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.760384 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"1bc3d9db-9849-427a-a9a7-ada6e23ca9e3","Type":"ContainerDied","Data":"3684af970a2c3f57554a85ceb125b9cc1659940b40db1a61afb28c53e8dd6091"}
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.760394 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.760399 4784 scope.go:117] "RemoveContainer" containerID="80c22f328da394495de003fd6791ae5f3a9eb8306e42226a91e45feb1d2520b0"
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.817033 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.819370 4784 reconciler_common.go:293] "Volume detached for volume \"pvc-04a1f8eb-a1ee-49a8-9ddb-41c4006574fd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-04a1f8eb-a1ee-49a8-9ddb-41c4006574fd\") on node \"crc\" DevicePath \"\""
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.819579 4784 scope.go:117] "RemoveContainer" containerID="6313dbf91685fedd972b24d23634cc938cd231c609b0b6ef46b14b015416a5bb"
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.838296 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.849507 4784 scope.go:117] "RemoveContainer" containerID="84c4c708e0559a93daba69693026e01f320821e16ca9f28d450be03dd5ab621c"
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.851890 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 05 13:15:49 crc kubenswrapper[4784]: E1205 13:15:49.852321 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1bc3d9db-9849-427a-a9a7-ada6e23ca9e3" containerName="thanos-sidecar"
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.852338 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="1bc3d9db-9849-427a-a9a7-ada6e23ca9e3" containerName="thanos-sidecar"
Dec 05 13:15:49 crc kubenswrapper[4784]: E1205 13:15:49.852350 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1bc3d9db-9849-427a-a9a7-ada6e23ca9e3" containerName="prometheus"
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.852356 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="1bc3d9db-9849-427a-a9a7-ada6e23ca9e3" containerName="prometheus"
Dec 05 13:15:49 crc kubenswrapper[4784]: E1205 13:15:49.852376 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e39ba41-37a6-47e9-8118-f67c0726721a" containerName="collect-profiles"
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.852383 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e39ba41-37a6-47e9-8118-f67c0726721a" containerName="collect-profiles"
Dec 05 13:15:49 crc kubenswrapper[4784]: E1205 13:15:49.852401 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1bc3d9db-9849-427a-a9a7-ada6e23ca9e3" containerName="config-reloader"
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.852407 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="1bc3d9db-9849-427a-a9a7-ada6e23ca9e3" containerName="config-reloader"
Dec 05 13:15:49 crc kubenswrapper[4784]: E1205 13:15:49.852417 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1bc3d9db-9849-427a-a9a7-ada6e23ca9e3" containerName="init-config-reloader"
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.852423 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="1bc3d9db-9849-427a-a9a7-ada6e23ca9e3" containerName="init-config-reloader"
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.852624 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e39ba41-37a6-47e9-8118-f67c0726721a" containerName="collect-profiles"
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.852636 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="1bc3d9db-9849-427a-a9a7-ada6e23ca9e3" containerName="config-reloader"
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.852657 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="1bc3d9db-9849-427a-a9a7-ada6e23ca9e3" containerName="prometheus"
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.852663 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="1bc3d9db-9849-427a-a9a7-ada6e23ca9e3" containerName="thanos-sidecar"
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.854448 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.868023 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage"
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.868110 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0"
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.868376 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file"
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.868533 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config"
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.874654 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-mphlc"
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.880734 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.885292 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0"
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.885348 4784 scope.go:117] "RemoveContainer" containerID="520a6f5e6de6d5e04ddbd125f2f6f01e231622019e47cfaf93f2b494f6757e62"
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.941180 4784 scope.go:117] "RemoveContainer" containerID="80c22f328da394495de003fd6791ae5f3a9eb8306e42226a91e45feb1d2520b0"
Dec 05 13:15:49 crc kubenswrapper[4784]: E1205 13:15:49.941688 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"80c22f328da394495de003fd6791ae5f3a9eb8306e42226a91e45feb1d2520b0\": container with ID starting with 80c22f328da394495de003fd6791ae5f3a9eb8306e42226a91e45feb1d2520b0 not found: ID does not exist" containerID="80c22f328da394495de003fd6791ae5f3a9eb8306e42226a91e45feb1d2520b0"
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.941715 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80c22f328da394495de003fd6791ae5f3a9eb8306e42226a91e45feb1d2520b0"} err="failed to get container status \"80c22f328da394495de003fd6791ae5f3a9eb8306e42226a91e45feb1d2520b0\": rpc error: code = NotFound desc = could not find container \"80c22f328da394495de003fd6791ae5f3a9eb8306e42226a91e45feb1d2520b0\": container with ID starting with 80c22f328da394495de003fd6791ae5f3a9eb8306e42226a91e45feb1d2520b0 not found: ID does not exist"
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.941736 4784 scope.go:117] "RemoveContainer" containerID="6313dbf91685fedd972b24d23634cc938cd231c609b0b6ef46b14b015416a5bb"
Dec 05 13:15:49 crc kubenswrapper[4784]: E1205 13:15:49.942127 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6313dbf91685fedd972b24d23634cc938cd231c609b0b6ef46b14b015416a5bb\": container with ID starting with 6313dbf91685fedd972b24d23634cc938cd231c609b0b6ef46b14b015416a5bb not found: ID does not exist" containerID="6313dbf91685fedd972b24d23634cc938cd231c609b0b6ef46b14b015416a5bb"
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.942168 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6313dbf91685fedd972b24d23634cc938cd231c609b0b6ef46b14b015416a5bb"} err="failed to get container status \"6313dbf91685fedd972b24d23634cc938cd231c609b0b6ef46b14b015416a5bb\": rpc error: code = NotFound desc = could not find container \"6313dbf91685fedd972b24d23634cc938cd231c609b0b6ef46b14b015416a5bb\": container with ID starting with 6313dbf91685fedd972b24d23634cc938cd231c609b0b6ef46b14b015416a5bb not found: ID does not exist"
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.942217 4784 scope.go:117] "RemoveContainer" containerID="84c4c708e0559a93daba69693026e01f320821e16ca9f28d450be03dd5ab621c"
Dec 05 13:15:49 crc kubenswrapper[4784]: E1205 13:15:49.944765 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"84c4c708e0559a93daba69693026e01f320821e16ca9f28d450be03dd5ab621c\": container with ID starting with 84c4c708e0559a93daba69693026e01f320821e16ca9f28d450be03dd5ab621c not found: ID does not exist" containerID="84c4c708e0559a93daba69693026e01f320821e16ca9f28d450be03dd5ab621c"
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.944793 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84c4c708e0559a93daba69693026e01f320821e16ca9f28d450be03dd5ab621c"} err="failed to get container status \"84c4c708e0559a93daba69693026e01f320821e16ca9f28d450be03dd5ab621c\": rpc error: code = NotFound desc = could not find container \"84c4c708e0559a93daba69693026e01f320821e16ca9f28d450be03dd5ab621c\": container with ID starting with 84c4c708e0559a93daba69693026e01f320821e16ca9f28d450be03dd5ab621c not found: ID does not exist"
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.944809 4784 scope.go:117] "RemoveContainer" containerID="520a6f5e6de6d5e04ddbd125f2f6f01e231622019e47cfaf93f2b494f6757e62"
Dec 05 13:15:49 crc kubenswrapper[4784]: E1205 13:15:49.945048 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"520a6f5e6de6d5e04ddbd125f2f6f01e231622019e47cfaf93f2b494f6757e62\": container with ID starting with 520a6f5e6de6d5e04ddbd125f2f6f01e231622019e47cfaf93f2b494f6757e62 not found: ID does not exist" containerID="520a6f5e6de6d5e04ddbd125f2f6f01e231622019e47cfaf93f2b494f6757e62"
Dec 05 13:15:49 crc kubenswrapper[4784]: I1205 13:15:49.945070 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"520a6f5e6de6d5e04ddbd125f2f6f01e231622019e47cfaf93f2b494f6757e62"} err="failed to get container status \"520a6f5e6de6d5e04ddbd125f2f6f01e231622019e47cfaf93f2b494f6757e62\": rpc error: code = NotFound desc = could not find container \"520a6f5e6de6d5e04ddbd125f2f6f01e231622019e47cfaf93f2b494f6757e62\": container with ID starting with 520a6f5e6de6d5e04ddbd125f2f6f01e231622019e47cfaf93f2b494f6757e62 not found: ID does not exist"
Dec 05 13:15:50 crc kubenswrapper[4784]: I1205 13:15:50.023024 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f331c141-708a-4f4a-b0fa-e2cfcb1a7bed-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"f331c141-708a-4f4a-b0fa-e2cfcb1a7bed\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 13:15:50 crc kubenswrapper[4784]: I1205 13:15:50.023175 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/f331c141-708a-4f4a-b0fa-e2cfcb1a7bed-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"f331c141-708a-4f4a-b0fa-e2cfcb1a7bed\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 13:15:50 crc kubenswrapper[4784]: I1205 13:15:50.023246 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/f331c141-708a-4f4a-b0fa-e2cfcb1a7bed-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"f331c141-708a-4f4a-b0fa-e2cfcb1a7bed\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 13:15:50 crc kubenswrapper[4784]: I1205 13:15:50.023358 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/f331c141-708a-4f4a-b0fa-e2cfcb1a7bed-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"f331c141-708a-4f4a-b0fa-e2cfcb1a7bed\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 13:15:50 crc kubenswrapper[4784]: I1205 13:15:50.023417 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jfcdv\" (UniqueName: \"kubernetes.io/projected/f331c141-708a-4f4a-b0fa-e2cfcb1a7bed-kube-api-access-jfcdv\") pod \"prometheus-metric-storage-0\" (UID: \"f331c141-708a-4f4a-b0fa-e2cfcb1a7bed\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 13:15:50 crc kubenswrapper[4784]: I1205 13:15:50.023443 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/f331c141-708a-4f4a-b0fa-e2cfcb1a7bed-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"f331c141-708a-4f4a-b0fa-e2cfcb1a7bed\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 13:15:50 crc kubenswrapper[4784]: I1205 13:15:50.023471 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f331c141-708a-4f4a-b0fa-e2cfcb1a7bed-config\") pod \"prometheus-metric-storage-0\" (UID: \"f331c141-708a-4f4a-b0fa-e2cfcb1a7bed\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 13:15:50 crc kubenswrapper[4784]: I1205 13:15:50.023521 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/f331c141-708a-4f4a-b0fa-e2cfcb1a7bed-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"f331c141-708a-4f4a-b0fa-e2cfcb1a7bed\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 13:15:50 crc kubenswrapper[4784]: I1205 13:15:50.023770 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/f331c141-708a-4f4a-b0fa-e2cfcb1a7bed-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"f331c141-708a-4f4a-b0fa-e2cfcb1a7bed\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 13:15:50 crc kubenswrapper[4784]: I1205 13:15:50.023817 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/f331c141-708a-4f4a-b0fa-e2cfcb1a7bed-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"f331c141-708a-4f4a-b0fa-e2cfcb1a7bed\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 13:15:50 crc kubenswrapper[4784]: I1205 13:15:50.023860 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-04a1f8eb-a1ee-49a8-9ddb-41c4006574fd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-04a1f8eb-a1ee-49a8-9ddb-41c4006574fd\") pod \"prometheus-metric-storage-0\" (UID: \"f331c141-708a-4f4a-b0fa-e2cfcb1a7bed\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 13:15:50 crc kubenswrapper[4784]: I1205 13:15:50.125401 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/f331c141-708a-4f4a-b0fa-e2cfcb1a7bed-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"f331c141-708a-4f4a-b0fa-e2cfcb1a7bed\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 13:15:50 crc kubenswrapper[4784]: I1205 13:15:50.125464 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/f331c141-708a-4f4a-b0fa-e2cfcb1a7bed-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"f331c141-708a-4f4a-b0fa-e2cfcb1a7bed\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 13:15:50 crc kubenswrapper[4784]: I1205 13:15:50.125491 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-04a1f8eb-a1ee-49a8-9ddb-41c4006574fd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-04a1f8eb-a1ee-49a8-9ddb-41c4006574fd\") pod \"prometheus-metric-storage-0\" (UID: \"f331c141-708a-4f4a-b0fa-e2cfcb1a7bed\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 13:15:50 crc kubenswrapper[4784]: I1205 13:15:50.125530 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f331c141-708a-4f4a-b0fa-e2cfcb1a7bed-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"f331c141-708a-4f4a-b0fa-e2cfcb1a7bed\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 13:15:50 crc kubenswrapper[4784]: I1205 13:15:50.125603 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/f331c141-708a-4f4a-b0fa-e2cfcb1a7bed-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"f331c141-708a-4f4a-b0fa-e2cfcb1a7bed\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 13:15:50 crc kubenswrapper[4784]: I1205 13:15:50.125626 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/f331c141-708a-4f4a-b0fa-e2cfcb1a7bed-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"f331c141-708a-4f4a-b0fa-e2cfcb1a7bed\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 13:15:50 crc kubenswrapper[4784]: I1205 13:15:50.125685 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/f331c141-708a-4f4a-b0fa-e2cfcb1a7bed-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"f331c141-708a-4f4a-b0fa-e2cfcb1a7bed\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 13:15:50 crc kubenswrapper[4784]: I1205 13:15:50.125711 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jfcdv\" (UniqueName: \"kubernetes.io/projected/f331c141-708a-4f4a-b0fa-e2cfcb1a7bed-kube-api-access-jfcdv\") pod \"prometheus-metric-storage-0\" (UID: \"f331c141-708a-4f4a-b0fa-e2cfcb1a7bed\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 13:15:50 crc kubenswrapper[4784]: I1205 13:15:50.125735 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/f331c141-708a-4f4a-b0fa-e2cfcb1a7bed-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"f331c141-708a-4f4a-b0fa-e2cfcb1a7bed\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 13:15:50 crc kubenswrapper[4784]: I1205 13:15:50.125763 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f331c141-708a-4f4a-b0fa-e2cfcb1a7bed-config\") pod \"prometheus-metric-storage-0\" (UID: \"f331c141-708a-4f4a-b0fa-e2cfcb1a7bed\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 13:15:50 crc kubenswrapper[4784]: I1205 13:15:50.125785 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/f331c141-708a-4f4a-b0fa-e2cfcb1a7bed-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"f331c141-708a-4f4a-b0fa-e2cfcb1a7bed\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 13:15:50 crc kubenswrapper[4784]: I1205 13:15:50.129233 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/f331c141-708a-4f4a-b0fa-e2cfcb1a7bed-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"f331c141-708a-4f4a-b0fa-e2cfcb1a7bed\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 13:15:50 crc kubenswrapper[4784]: I1205 13:15:50.134074 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/f331c141-708a-4f4a-b0fa-e2cfcb1a7bed-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"f331c141-708a-4f4a-b0fa-e2cfcb1a7bed\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 13:15:50 crc kubenswrapper[4784]: I1205 13:15:50.134217 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/f331c141-708a-4f4a-b0fa-e2cfcb1a7bed-config\") pod \"prometheus-metric-storage-0\" (UID: \"f331c141-708a-4f4a-b0fa-e2cfcb1a7bed\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 13:15:50 crc kubenswrapper[4784]: I1205 13:15:50.134790 4784 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Dec 05 13:15:50 crc kubenswrapper[4784]: I1205 13:15:50.134933 4784 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-04a1f8eb-a1ee-49a8-9ddb-41c4006574fd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-04a1f8eb-a1ee-49a8-9ddb-41c4006574fd\") pod \"prometheus-metric-storage-0\" (UID: \"f331c141-708a-4f4a-b0fa-e2cfcb1a7bed\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/b62a06a2697edf91f726f31fa2b1cc522cd33435aa7b897a0891f96c3d70ee18/globalmount\"" pod="openstack/prometheus-metric-storage-0"
Dec 05 13:15:50 crc kubenswrapper[4784]: I1205 13:15:50.135587 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/f331c141-708a-4f4a-b0fa-e2cfcb1a7bed-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"f331c141-708a-4f4a-b0fa-e2cfcb1a7bed\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 13:15:50 crc kubenswrapper[4784]: I1205 13:15:50.136079 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/f331c141-708a-4f4a-b0fa-e2cfcb1a7bed-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"f331c141-708a-4f4a-b0fa-e2cfcb1a7bed\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 13:15:50 crc kubenswrapper[4784]: I1205 13:15:50.137002 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/f331c141-708a-4f4a-b0fa-e2cfcb1a7bed-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"f331c141-708a-4f4a-b0fa-e2cfcb1a7bed\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 13:15:50 crc kubenswrapper[4784]: I1205 13:15:50.137611 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/f331c141-708a-4f4a-b0fa-e2cfcb1a7bed-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"f331c141-708a-4f4a-b0fa-e2cfcb1a7bed\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 13:15:50 crc kubenswrapper[4784]: I1205 13:15:50.146637 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f331c141-708a-4f4a-b0fa-e2cfcb1a7bed-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"f331c141-708a-4f4a-b0fa-e2cfcb1a7bed\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 13:15:50 crc kubenswrapper[4784]: I1205 13:15:50.147132 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/f331c141-708a-4f4a-b0fa-e2cfcb1a7bed-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"f331c141-708a-4f4a-b0fa-e2cfcb1a7bed\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 13:15:50 crc kubenswrapper[4784]: I1205 13:15:50.172782 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jfcdv\" (UniqueName: \"kubernetes.io/projected/f331c141-708a-4f4a-b0fa-e2cfcb1a7bed-kube-api-access-jfcdv\") pod \"prometheus-metric-storage-0\" (UID: \"f331c141-708a-4f4a-b0fa-e2cfcb1a7bed\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 13:15:50 crc kubenswrapper[4784]: I1205 13:15:50.201416 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-04a1f8eb-a1ee-49a8-9ddb-41c4006574fd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-04a1f8eb-a1ee-49a8-9ddb-41c4006574fd\") pod \"prometheus-metric-storage-0\" (UID: \"f331c141-708a-4f4a-b0fa-e2cfcb1a7bed\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 13:15:50 crc kubenswrapper[4784]: I1205 13:15:50.490065 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Dec 05 13:15:51 crc kubenswrapper[4784]: I1205 13:15:51.017368 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bc3d9db-9849-427a-a9a7-ada6e23ca9e3" path="/var/lib/kubelet/pods/1bc3d9db-9849-427a-a9a7-ada6e23ca9e3/volumes"
Dec 05 13:15:51 crc kubenswrapper[4784]: I1205 13:15:51.025865 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 05 13:15:51 crc kubenswrapper[4784]: I1205 13:15:51.283639 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-fddfp"]
Dec 05 13:15:51 crc kubenswrapper[4784]: I1205 13:15:51.287154 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fddfp"
Dec 05 13:15:51 crc kubenswrapper[4784]: I1205 13:15:51.301647 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fddfp"]
Dec 05 13:15:51 crc kubenswrapper[4784]: I1205 13:15:51.471934 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c98b299-d990-460e-aa0b-c057806a2236-catalog-content\") pod \"redhat-operators-fddfp\" (UID: \"7c98b299-d990-460e-aa0b-c057806a2236\") " pod="openshift-marketplace/redhat-operators-fddfp"
Dec 05 13:15:51 crc kubenswrapper[4784]: I1205 13:15:51.472018 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c98b299-d990-460e-aa0b-c057806a2236-utilities\") pod \"redhat-operators-fddfp\" (UID: \"7c98b299-d990-460e-aa0b-c057806a2236\") " pod="openshift-marketplace/redhat-operators-fddfp"
Dec 05 13:15:51 crc kubenswrapper[4784]: I1205 13:15:51.472577 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h9z66\" (UniqueName: \"kubernetes.io/projected/7c98b299-d990-460e-aa0b-c057806a2236-kube-api-access-h9z66\") pod \"redhat-operators-fddfp\" (UID: \"7c98b299-d990-460e-aa0b-c057806a2236\") " pod="openshift-marketplace/redhat-operators-fddfp"
Dec 05 13:15:51 crc kubenswrapper[4784]: I1205 13:15:51.574168 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h9z66\" (UniqueName: \"kubernetes.io/projected/7c98b299-d990-460e-aa0b-c057806a2236-kube-api-access-h9z66\") pod \"redhat-operators-fddfp\" (UID: \"7c98b299-d990-460e-aa0b-c057806a2236\") " pod="openshift-marketplace/redhat-operators-fddfp"
Dec 05 13:15:51 crc kubenswrapper[4784]: I1205 13:15:51.574607 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c98b299-d990-460e-aa0b-c057806a2236-catalog-content\") pod \"redhat-operators-fddfp\" (UID: \"7c98b299-d990-460e-aa0b-c057806a2236\") " pod="openshift-marketplace/redhat-operators-fddfp"
Dec 05 13:15:51 crc kubenswrapper[4784]: I1205 13:15:51.574668 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c98b299-d990-460e-aa0b-c057806a2236-utilities\") pod \"redhat-operators-fddfp\" (UID: \"7c98b299-d990-460e-aa0b-c057806a2236\") " pod="openshift-marketplace/redhat-operators-fddfp"
Dec 05 13:15:51 crc kubenswrapper[4784]: I1205 13:15:51.575306 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c98b299-d990-460e-aa0b-c057806a2236-catalog-content\") pod \"redhat-operators-fddfp\" (UID: \"7c98b299-d990-460e-aa0b-c057806a2236\") " pod="openshift-marketplace/redhat-operators-fddfp"
Dec 05 13:15:51 crc kubenswrapper[4784]: I1205 13:15:51.575328 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c98b299-d990-460e-aa0b-c057806a2236-utilities\") pod \"redhat-operators-fddfp\" (UID: \"7c98b299-d990-460e-aa0b-c057806a2236\") " pod="openshift-marketplace/redhat-operators-fddfp"
Dec 05 13:15:51 crc kubenswrapper[4784]: I1205 13:15:51.595248 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h9z66\" (UniqueName: \"kubernetes.io/projected/7c98b299-d990-460e-aa0b-c057806a2236-kube-api-access-h9z66\") pod \"redhat-operators-fddfp\" (UID: \"7c98b299-d990-460e-aa0b-c057806a2236\") " pod="openshift-marketplace/redhat-operators-fddfp"
Dec 05 13:15:51 crc kubenswrapper[4784]: I1205 13:15:51.610159 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fddfp"
Dec 05 13:15:51 crc kubenswrapper[4784]: I1205 13:15:51.789258 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"f331c141-708a-4f4a-b0fa-e2cfcb1a7bed","Type":"ContainerStarted","Data":"d4d79f6614c268d88722b5813bda86de955b621f211de326210a65c106a1b0e3"}
Dec 05 13:15:52 crc kubenswrapper[4784]: I1205 13:15:52.124310 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fddfp"]
Dec 05 13:15:52 crc kubenswrapper[4784]: I1205 13:15:52.802648 4784 generic.go:334] "Generic (PLEG): container finished" podID="7c98b299-d990-460e-aa0b-c057806a2236" containerID="ec2daa4ad957758d2d23a8be0e1b643dc8e695ea0ad321b3b3afee5707174684" exitCode=0
Dec 05 13:15:52 crc kubenswrapper[4784]: I1205 13:15:52.802752 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fddfp" event={"ID":"7c98b299-d990-460e-aa0b-c057806a2236","Type":"ContainerDied","Data":"ec2daa4ad957758d2d23a8be0e1b643dc8e695ea0ad321b3b3afee5707174684"}
Dec 05 13:15:52 crc kubenswrapper[4784]: I1205 13:15:52.803433 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fddfp" event={"ID":"7c98b299-d990-460e-aa0b-c057806a2236","Type":"ContainerStarted","Data":"ce08359f5ffefa8a227f9a45caff43af2e4e8fd5707c30b7001e6aa3bd69534e"}
Dec 05 13:15:53 crc kubenswrapper[4784]: I1205 13:15:53.815610 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fddfp" event={"ID":"7c98b299-d990-460e-aa0b-c057806a2236","Type":"ContainerStarted","Data":"297ff09f3207606b353790f839a254d131daf8153eeef4aae62ddfc6167c0fbb"}
Dec 05 13:15:55 crc kubenswrapper[4784]: I1205 13:15:55.842294 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"f331c141-708a-4f4a-b0fa-e2cfcb1a7bed","Type":"ContainerStarted","Data":"306e2caa4eda1f24c29c87c129febf596862bfd8bb9987ae53c41139cad3669f"}
Dec 05 13:15:56 crc kubenswrapper[4784]: I1205 13:15:55.999024 4784 scope.go:117] "RemoveContainer" containerID="f945d817e6fa3117971bcb82d086dfe6a7c65c33ba45f235aec8fd289f80bd9f"
Dec 05 13:15:56 crc kubenswrapper[4784]: E1205 13:15:55.999540 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 13:15:57 crc kubenswrapper[4784]: I1205 13:15:57.863905 4784 generic.go:334] "Generic (PLEG): container finished" podID="7c98b299-d990-460e-aa0b-c057806a2236" containerID="297ff09f3207606b353790f839a254d131daf8153eeef4aae62ddfc6167c0fbb" exitCode=0
Dec 05 13:15:57 crc kubenswrapper[4784]: I1205 13:15:57.864025 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fddfp" event={"ID":"7c98b299-d990-460e-aa0b-c057806a2236","Type":"ContainerDied","Data":"297ff09f3207606b353790f839a254d131daf8153eeef4aae62ddfc6167c0fbb"}
Dec 05 13:15:58 crc kubenswrapper[4784]: I1205 13:15:58.878484 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fddfp" event={"ID":"7c98b299-d990-460e-aa0b-c057806a2236","Type":"ContainerStarted","Data":"a6488c80d9d73610552eb5cc000cdbd695f480c9fc74b7d54899f7d2881f889f"}
Dec 05 13:15:58 crc kubenswrapper[4784]: I1205 13:15:58.921735 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-fddfp" podStartSLOduration=2.405625242 podStartE2EDuration="7.921703097s" podCreationTimestamp="2025-12-05 13:15:51 +0000 UTC" firstStartedPulling="2025-12-05 13:15:52.806366833 +0000 UTC m=+3032.226433648" lastFinishedPulling="2025-12-05 13:15:58.322444678 +0000 UTC m=+3037.742511503" observedRunningTime="2025-12-05 13:15:58.902635673 +0000 UTC m=+3038.322702488" watchObservedRunningTime="2025-12-05 13:15:58.921703097 +0000 UTC m=+3038.341769942"
Dec 05 13:16:01 crc kubenswrapper[4784]: I1205 13:16:01.611727 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-fddfp"
Dec 05 13:16:01 crc kubenswrapper[4784]: I1205 13:16:01.612145 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-fddfp"
Dec 05 13:16:02 crc kubenswrapper[4784]: I1205 13:16:02.686789 4784 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-fddfp" podUID="7c98b299-d990-460e-aa0b-c057806a2236" containerName="registry-server" probeResult="failure" output=<
Dec 05 13:16:02 crc kubenswrapper[4784]: timeout: failed to connect service ":50051" within 1s
Dec 05 13:16:02 crc kubenswrapper[4784]: >
Dec 05 13:16:03 crc kubenswrapper[4784]: I1205 13:16:03.946124 4784 generic.go:334] "Generic (PLEG): container finished" podID="f331c141-708a-4f4a-b0fa-e2cfcb1a7bed" containerID="306e2caa4eda1f24c29c87c129febf596862bfd8bb9987ae53c41139cad3669f" exitCode=0
Dec 05 13:16:03 crc kubenswrapper[4784]: I1205 13:16:03.946207 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"f331c141-708a-4f4a-b0fa-e2cfcb1a7bed","Type":"ContainerDied","Data":"306e2caa4eda1f24c29c87c129febf596862bfd8bb9987ae53c41139cad3669f"}
Dec 05 13:16:04 crc kubenswrapper[4784]: I1205 13:16:04.964719 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"f331c141-708a-4f4a-b0fa-e2cfcb1a7bed","Type":"ContainerStarted","Data":"aedec12f88df7907cc34d5c80c3d445372f076911cdd468082114a4546362032"}
Dec 05 13:16:09 crc kubenswrapper[4784]: I1205 13:16:09.001003 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"f331c141-708a-4f4a-b0fa-e2cfcb1a7bed","Type":"ContainerStarted","Data":"fc7e01842fdf328c8be21a0cd4b95732ea03e07a536f0b6fefee29bd2e853548"}
Dec 05 13:16:09 crc kubenswrapper[4784]: I1205 13:16:09.998700 4784 scope.go:117] "RemoveContainer" containerID="f945d817e6fa3117971bcb82d086dfe6a7c65c33ba45f235aec8fd289f80bd9f"
Dec 05 13:16:09 crc kubenswrapper[4784]: E1205 13:16:09.999407 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 13:16:10 crc kubenswrapper[4784]: I1205 13:16:10.014770 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"f331c141-708a-4f4a-b0fa-e2cfcb1a7bed","Type":"ContainerStarted","Data":"8e2bb6843bb53e047ea01573df727beba8b97738327740cebec8c9f56236bd41"}
Dec 05 13:16:10 crc kubenswrapper[4784]: I1205 13:16:10.054391 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=21.054367496 podStartE2EDuration="21.054367496s" podCreationTimestamp="2025-12-05 13:15:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 13:16:10.042735723 +0000 UTC m=+3049.462802558" watchObservedRunningTime="2025-12-05 13:16:10.054367496 +0000 UTC m=+3049.474434321"
Dec 05 13:16:10 crc kubenswrapper[4784]: I1205 13:16:10.491364 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0"
Dec 05 13:16:11 crc kubenswrapper[4784]: I1205 13:16:11.686952 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-fddfp"
Dec 05 13:16:11 crc kubenswrapper[4784]: I1205 13:16:11.763081 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-fddfp"
Dec 05 13:16:11 crc kubenswrapper[4784]: I1205 13:16:11.948605 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fddfp"]
Dec 05 13:16:13 crc kubenswrapper[4784]: I1205 13:16:13.047693 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-fddfp" podUID="7c98b299-d990-460e-aa0b-c057806a2236" containerName="registry-server" containerID="cri-o://a6488c80d9d73610552eb5cc000cdbd695f480c9fc74b7d54899f7d2881f889f" gracePeriod=2
Dec 05 13:16:13 crc kubenswrapper[4784]: I1205 13:16:13.562493 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fddfp"
Dec 05 13:16:13 crc kubenswrapper[4784]: I1205 13:16:13.757006 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c98b299-d990-460e-aa0b-c057806a2236-utilities\") pod \"7c98b299-d990-460e-aa0b-c057806a2236\" (UID: \"7c98b299-d990-460e-aa0b-c057806a2236\") "
Dec 05 13:16:13 crc kubenswrapper[4784]: I1205 13:16:13.757716 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7c98b299-d990-460e-aa0b-c057806a2236-utilities" (OuterVolumeSpecName: "utilities") pod "7c98b299-d990-460e-aa0b-c057806a2236" (UID: "7c98b299-d990-460e-aa0b-c057806a2236"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 13:16:13 crc kubenswrapper[4784]: I1205 13:16:13.757774 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h9z66\" (UniqueName: \"kubernetes.io/projected/7c98b299-d990-460e-aa0b-c057806a2236-kube-api-access-h9z66\") pod \"7c98b299-d990-460e-aa0b-c057806a2236\" (UID: \"7c98b299-d990-460e-aa0b-c057806a2236\") "
Dec 05 13:16:13 crc kubenswrapper[4784]: I1205 13:16:13.758525 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c98b299-d990-460e-aa0b-c057806a2236-catalog-content\") pod \"7c98b299-d990-460e-aa0b-c057806a2236\" (UID: \"7c98b299-d990-460e-aa0b-c057806a2236\") "
Dec 05 13:16:13 crc kubenswrapper[4784]: I1205 13:16:13.759000 4784 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c98b299-d990-460e-aa0b-c057806a2236-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 13:16:13 crc kubenswrapper[4784]: I1205 13:16:13.774285 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c98b299-d990-460e-aa0b-c057806a2236-kube-api-access-h9z66" (OuterVolumeSpecName: "kube-api-access-h9z66") pod "7c98b299-d990-460e-aa0b-c057806a2236" (UID: "7c98b299-d990-460e-aa0b-c057806a2236"). InnerVolumeSpecName "kube-api-access-h9z66". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 13:16:13 crc kubenswrapper[4784]: I1205 13:16:13.861860 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h9z66\" (UniqueName: \"kubernetes.io/projected/7c98b299-d990-460e-aa0b-c057806a2236-kube-api-access-h9z66\") on node \"crc\" DevicePath \"\""
Dec 05 13:16:13 crc kubenswrapper[4784]: I1205 13:16:13.906259 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7c98b299-d990-460e-aa0b-c057806a2236-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7c98b299-d990-460e-aa0b-c057806a2236" (UID: "7c98b299-d990-460e-aa0b-c057806a2236"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 13:16:13 crc kubenswrapper[4784]: I1205 13:16:13.964745 4784 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c98b299-d990-460e-aa0b-c057806a2236-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 13:16:14 crc kubenswrapper[4784]: I1205 13:16:14.063079 4784 generic.go:334] "Generic (PLEG): container finished" podID="7c98b299-d990-460e-aa0b-c057806a2236" containerID="a6488c80d9d73610552eb5cc000cdbd695f480c9fc74b7d54899f7d2881f889f" exitCode=0
Dec 05 13:16:14 crc kubenswrapper[4784]: I1205 13:16:14.063128 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fddfp" event={"ID":"7c98b299-d990-460e-aa0b-c057806a2236","Type":"ContainerDied","Data":"a6488c80d9d73610552eb5cc000cdbd695f480c9fc74b7d54899f7d2881f889f"}
Dec 05 13:16:14 crc kubenswrapper[4784]: I1205 13:16:14.063159 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fddfp" event={"ID":"7c98b299-d990-460e-aa0b-c057806a2236","Type":"ContainerDied","Data":"ce08359f5ffefa8a227f9a45caff43af2e4e8fd5707c30b7001e6aa3bd69534e"}
Dec 05 13:16:14 crc kubenswrapper[4784]: I1205 13:16:14.063154 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fddfp"
Dec 05 13:16:14 crc kubenswrapper[4784]: I1205 13:16:14.063243 4784 scope.go:117] "RemoveContainer" containerID="a6488c80d9d73610552eb5cc000cdbd695f480c9fc74b7d54899f7d2881f889f"
Dec 05 13:16:14 crc kubenswrapper[4784]: I1205 13:16:14.095047 4784 scope.go:117] "RemoveContainer" containerID="297ff09f3207606b353790f839a254d131daf8153eeef4aae62ddfc6167c0fbb"
Dec 05 13:16:14 crc kubenswrapper[4784]: I1205 13:16:14.102339 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fddfp"]
Dec 05 13:16:14 crc kubenswrapper[4784]: I1205 13:16:14.111375 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-fddfp"]
Dec 05 13:16:14 crc kubenswrapper[4784]: I1205 13:16:14.144644 4784 scope.go:117] "RemoveContainer" containerID="ec2daa4ad957758d2d23a8be0e1b643dc8e695ea0ad321b3b3afee5707174684"
Dec 05 13:16:14 crc kubenswrapper[4784]: I1205 13:16:14.185544 4784 scope.go:117] "RemoveContainer" containerID="a6488c80d9d73610552eb5cc000cdbd695f480c9fc74b7d54899f7d2881f889f"
Dec 05 13:16:14 crc kubenswrapper[4784]: E1205 13:16:14.185863 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a6488c80d9d73610552eb5cc000cdbd695f480c9fc74b7d54899f7d2881f889f\": container with ID starting with a6488c80d9d73610552eb5cc000cdbd695f480c9fc74b7d54899f7d2881f889f not found: ID does not exist" containerID="a6488c80d9d73610552eb5cc000cdbd695f480c9fc74b7d54899f7d2881f889f"
Dec 05 13:16:14 crc kubenswrapper[4784]: I1205 13:16:14.185895 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6488c80d9d73610552eb5cc000cdbd695f480c9fc74b7d54899f7d2881f889f"} err="failed to get container status \"a6488c80d9d73610552eb5cc000cdbd695f480c9fc74b7d54899f7d2881f889f\": rpc error: code = NotFound desc = could not find container \"a6488c80d9d73610552eb5cc000cdbd695f480c9fc74b7d54899f7d2881f889f\": container with ID starting with a6488c80d9d73610552eb5cc000cdbd695f480c9fc74b7d54899f7d2881f889f not found: ID does not exist"
Dec 05 13:16:14 crc kubenswrapper[4784]: I1205 13:16:14.185913 4784 scope.go:117] "RemoveContainer" containerID="297ff09f3207606b353790f839a254d131daf8153eeef4aae62ddfc6167c0fbb"
Dec 05 13:16:14 crc kubenswrapper[4784]: E1205 13:16:14.186244 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"297ff09f3207606b353790f839a254d131daf8153eeef4aae62ddfc6167c0fbb\": container with ID starting with 297ff09f3207606b353790f839a254d131daf8153eeef4aae62ddfc6167c0fbb not found: ID does not exist" containerID="297ff09f3207606b353790f839a254d131daf8153eeef4aae62ddfc6167c0fbb"
Dec 05 13:16:14 crc kubenswrapper[4784]: I1205 13:16:14.186374 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"297ff09f3207606b353790f839a254d131daf8153eeef4aae62ddfc6167c0fbb"} err="failed to get container status \"297ff09f3207606b353790f839a254d131daf8153eeef4aae62ddfc6167c0fbb\": rpc error: code = NotFound desc = could not find container \"297ff09f3207606b353790f839a254d131daf8153eeef4aae62ddfc6167c0fbb\": container with ID starting with 297ff09f3207606b353790f839a254d131daf8153eeef4aae62ddfc6167c0fbb not found: ID does not exist"
Dec 05 13:16:14 crc kubenswrapper[4784]: I1205 13:16:14.186486 4784 scope.go:117] "RemoveContainer" containerID="ec2daa4ad957758d2d23a8be0e1b643dc8e695ea0ad321b3b3afee5707174684"
Dec 05 13:16:14 crc kubenswrapper[4784]: E1205 13:16:14.186860 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec2daa4ad957758d2d23a8be0e1b643dc8e695ea0ad321b3b3afee5707174684\": container with ID starting with ec2daa4ad957758d2d23a8be0e1b643dc8e695ea0ad321b3b3afee5707174684 not found: ID does not exist" containerID="ec2daa4ad957758d2d23a8be0e1b643dc8e695ea0ad321b3b3afee5707174684"
Dec 05 13:16:14 crc kubenswrapper[4784]: I1205 13:16:14.186887 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec2daa4ad957758d2d23a8be0e1b643dc8e695ea0ad321b3b3afee5707174684"} err="failed to get container status \"ec2daa4ad957758d2d23a8be0e1b643dc8e695ea0ad321b3b3afee5707174684\": rpc error: code = NotFound desc = could not find container \"ec2daa4ad957758d2d23a8be0e1b643dc8e695ea0ad321b3b3afee5707174684\": container with ID starting with ec2daa4ad957758d2d23a8be0e1b643dc8e695ea0ad321b3b3afee5707174684 not found: ID does not exist"
Dec 05 13:16:15 crc kubenswrapper[4784]: I1205 13:16:15.019590 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c98b299-d990-460e-aa0b-c057806a2236" path="/var/lib/kubelet/pods/7c98b299-d990-460e-aa0b-c057806a2236/volumes"
Dec 05 13:16:20 crc kubenswrapper[4784]: I1205 13:16:20.490563 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0"
Dec 05 13:16:20 crc kubenswrapper[4784]: I1205 13:16:20.509984 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0"
Dec 05 13:16:21 crc kubenswrapper[4784]: I1205 13:16:21.161444 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0"
Dec 05 13:16:23 crc kubenswrapper[4784]: I1205 13:16:23.000450 4784 scope.go:117] "RemoveContainer" containerID="f945d817e6fa3117971bcb82d086dfe6a7c65c33ba45f235aec8fd289f80bd9f"
Dec 05 13:16:23 crc kubenswrapper[4784]: E1205 13:16:23.001338 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 13:16:34 crc kubenswrapper[4784]: I1205 13:16:34.999065 4784 scope.go:117] "RemoveContainer" containerID="f945d817e6fa3117971bcb82d086dfe6a7c65c33ba45f235aec8fd289f80bd9f"
Dec 05 13:16:35 crc kubenswrapper[4784]: E1205 13:16:35.000032 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 13:16:41 crc kubenswrapper[4784]: I1205 13:16:41.021825 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"]
Dec 05 13:16:41 crc kubenswrapper[4784]: E1205 13:16:41.023078 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c98b299-d990-460e-aa0b-c057806a2236" containerName="extract-utilities"
Dec 05 13:16:41 crc kubenswrapper[4784]: I1205 13:16:41.023102 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c98b299-d990-460e-aa0b-c057806a2236" containerName="extract-utilities"
Dec 05 13:16:41 crc kubenswrapper[4784]: E1205 13:16:41.023123 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c98b299-d990-460e-aa0b-c057806a2236" containerName="registry-server"
Dec 05 13:16:41 crc kubenswrapper[4784]: I1205 13:16:41.023132 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c98b299-d990-460e-aa0b-c057806a2236" containerName="registry-server"
Dec 05 13:16:41 crc kubenswrapper[4784]: E1205 13:16:41.023153 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c98b299-d990-460e-aa0b-c057806a2236" containerName="extract-content"
Dec 05 13:16:41 crc kubenswrapper[4784]: I1205 13:16:41.023163 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c98b299-d990-460e-aa0b-c057806a2236" containerName="extract-content"
Dec 05 13:16:41 crc kubenswrapper[4784]: I1205 13:16:41.023431 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c98b299-d990-460e-aa0b-c057806a2236" containerName="registry-server"
Dec 05 13:16:41 crc kubenswrapper[4784]: I1205 13:16:41.024341 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest"
Dec 05 13:16:41 crc kubenswrapper[4784]: I1205 13:16:41.028981 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-mldzm"
Dec 05 13:16:41 crc kubenswrapper[4784]: I1205 13:16:41.029005 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key"
Dec 05 13:16:41 crc kubenswrapper[4784]: I1205 13:16:41.037483 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0"
Dec 05 13:16:41 crc kubenswrapper[4784]: I1205 13:16:41.037537 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0"
Dec 05 13:16:41 crc kubenswrapper[4784]: I1205 13:16:41.045565 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"]
Dec 05 13:16:41 crc kubenswrapper[4784]: I1205 13:16:41.084055 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8fb2\" (UniqueName: \"kubernetes.io/projected/9dc746ad-99ec-4a42-8c05-3c45ece46906-kube-api-access-z8fb2\") pod \"tempest-tests-tempest\" (UID: \"9dc746ad-99ec-4a42-8c05-3c45ece46906\") " pod="openstack/tempest-tests-tempest"
Dec 05 13:16:41 crc kubenswrapper[4784]: I1205 13:16:41.084114 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/9dc746ad-99ec-4a42-8c05-3c45ece46906-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"9dc746ad-99ec-4a42-8c05-3c45ece46906\") " pod="openstack/tempest-tests-tempest"
Dec 05 13:16:41 crc kubenswrapper[4784]: I1205 13:16:41.084235 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/9dc746ad-99ec-4a42-8c05-3c45ece46906-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"9dc746ad-99ec-4a42-8c05-3c45ece46906\") " pod="openstack/tempest-tests-tempest"
Dec 05 13:16:41 crc kubenswrapper[4784]: I1205 13:16:41.084306 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/9dc746ad-99ec-4a42-8c05-3c45ece46906-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"9dc746ad-99ec-4a42-8c05-3c45ece46906\") " pod="openstack/tempest-tests-tempest"
Dec 05 13:16:41 crc kubenswrapper[4784]: I1205 13:16:41.084335 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9dc746ad-99ec-4a42-8c05-3c45ece46906-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"9dc746ad-99ec-4a42-8c05-3c45ece46906\") " pod="openstack/tempest-tests-tempest"
Dec 05 13:16:41 crc kubenswrapper[4784]: I1205 13:16:41.084376 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/9dc746ad-99ec-4a42-8c05-3c45ece46906-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"9dc746ad-99ec-4a42-8c05-3c45ece46906\") " pod="openstack/tempest-tests-tempest"
Dec 05 13:16:41 crc kubenswrapper[4784]: I1205 13:16:41.084410 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"tempest-tests-tempest\" (UID: \"9dc746ad-99ec-4a42-8c05-3c45ece46906\") " pod="openstack/tempest-tests-tempest"
Dec 05 13:16:41 crc kubenswrapper[4784]: I1205 13:16:41.084468 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/9dc746ad-99ec-4a42-8c05-3c45ece46906-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"9dc746ad-99ec-4a42-8c05-3c45ece46906\") " pod="openstack/tempest-tests-tempest"
Dec 05 13:16:41 crc kubenswrapper[4784]: I1205 13:16:41.084498 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9dc746ad-99ec-4a42-8c05-3c45ece46906-config-data\") pod \"tempest-tests-tempest\" (UID: \"9dc746ad-99ec-4a42-8c05-3c45ece46906\") " pod="openstack/tempest-tests-tempest"
Dec 05 13:16:41 crc kubenswrapper[4784]: I1205 13:16:41.186518 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/9dc746ad-99ec-4a42-8c05-3c45ece46906-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"9dc746ad-99ec-4a42-8c05-3c45ece46906\") " pod="openstack/tempest-tests-tempest"
Dec 05 13:16:41 crc kubenswrapper[4784]: I1205 13:16:41.186626 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/9dc746ad-99ec-4a42-8c05-3c45ece46906-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"9dc746ad-99ec-4a42-8c05-3c45ece46906\") " pod="openstack/tempest-tests-tempest"
Dec 05 13:16:41 crc kubenswrapper[4784]: I1205 13:16:41.186656 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9dc746ad-99ec-4a42-8c05-3c45ece46906-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"9dc746ad-99ec-4a42-8c05-3c45ece46906\") " pod="openstack/tempest-tests-tempest"
Dec 05 13:16:41 crc kubenswrapper[4784]: I1205 13:16:41.186685 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/9dc746ad-99ec-4a42-8c05-3c45ece46906-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"9dc746ad-99ec-4a42-8c05-3c45ece46906\") " pod="openstack/tempest-tests-tempest"
Dec 05 13:16:41 crc kubenswrapper[4784]: I1205 13:16:41.186723 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"tempest-tests-tempest\" (UID: \"9dc746ad-99ec-4a42-8c05-3c45ece46906\") " pod="openstack/tempest-tests-tempest"
Dec 05 13:16:41 crc kubenswrapper[4784]: I1205 13:16:41.186781 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/9dc746ad-99ec-4a42-8c05-3c45ece46906-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"9dc746ad-99ec-4a42-8c05-3c45ece46906\") " pod="openstack/tempest-tests-tempest"
Dec 05 13:16:41 crc kubenswrapper[4784]: I1205 13:16:41.186809 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9dc746ad-99ec-4a42-8c05-3c45ece46906-config-data\") pod \"tempest-tests-tempest\" (UID: \"9dc746ad-99ec-4a42-8c05-3c45ece46906\") " pod="openstack/tempest-tests-tempest"
Dec 05 13:16:41 crc kubenswrapper[4784]: I1205 13:16:41.186905 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8fb2\" (UniqueName: \"kubernetes.io/projected/9dc746ad-99ec-4a42-8c05-3c45ece46906-kube-api-access-z8fb2\") pod \"tempest-tests-tempest\" (UID: \"9dc746ad-99ec-4a42-8c05-3c45ece46906\") " pod="openstack/tempest-tests-tempest"
Dec 05 13:16:41 crc kubenswrapper[4784]: I1205 13:16:41.186931 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/9dc746ad-99ec-4a42-8c05-3c45ece46906-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"9dc746ad-99ec-4a42-8c05-3c45ece46906\") " pod="openstack/tempest-tests-tempest"
Dec 05 13:16:41 crc kubenswrapper[4784]: I1205 13:16:41.187649 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/9dc746ad-99ec-4a42-8c05-3c45ece46906-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"9dc746ad-99ec-4a42-8c05-3c45ece46906\") " pod="openstack/tempest-tests-tempest"
Dec 05 13:16:41 crc kubenswrapper[4784]: I1205 13:16:41.188179 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/9dc746ad-99ec-4a42-8c05-3c45ece46906-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"9dc746ad-99ec-4a42-8c05-3c45ece46906\") " pod="openstack/tempest-tests-tempest"
Dec 05 13:16:41 crc kubenswrapper[4784]: I1205 13:16:41.188657 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/9dc746ad-99ec-4a42-8c05-3c45ece46906-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"9dc746ad-99ec-4a42-8c05-3c45ece46906\") " pod="openstack/tempest-tests-tempest"
Dec 05 13:16:41 crc kubenswrapper[4784]: I1205 13:16:41.188923 4784 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"tempest-tests-tempest\" (UID: \"9dc746ad-99ec-4a42-8c05-3c45ece46906\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/tempest-tests-tempest"
Dec 05 13:16:41 crc kubenswrapper[4784]: I1205 13:16:41.194369 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/9dc746ad-99ec-4a42-8c05-3c45ece46906-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"9dc746ad-99ec-4a42-8c05-3c45ece46906\") " pod="openstack/tempest-tests-tempest"
Dec 05 13:16:41 crc kubenswrapper[4784]: I1205 13:16:41.195325 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9dc746ad-99ec-4a42-8c05-3c45ece46906-config-data\") pod \"tempest-tests-tempest\" (UID: \"9dc746ad-99ec-4a42-8c05-3c45ece46906\") " pod="openstack/tempest-tests-tempest"
Dec 05 13:16:41 crc kubenswrapper[4784]: I1205 13:16:41.196634 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/9dc746ad-99ec-4a42-8c05-3c45ece46906-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"9dc746ad-99ec-4a42-8c05-3c45ece46906\") " pod="openstack/tempest-tests-tempest"
Dec 05 13:16:41 crc kubenswrapper[4784]: I1205 
13:16:41.220621 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9dc746ad-99ec-4a42-8c05-3c45ece46906-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"9dc746ad-99ec-4a42-8c05-3c45ece46906\") " pod="openstack/tempest-tests-tempest" Dec 05 13:16:41 crc kubenswrapper[4784]: I1205 13:16:41.230337 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8fb2\" (UniqueName: \"kubernetes.io/projected/9dc746ad-99ec-4a42-8c05-3c45ece46906-kube-api-access-z8fb2\") pod \"tempest-tests-tempest\" (UID: \"9dc746ad-99ec-4a42-8c05-3c45ece46906\") " pod="openstack/tempest-tests-tempest" Dec 05 13:16:41 crc kubenswrapper[4784]: I1205 13:16:41.234611 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"tempest-tests-tempest\" (UID: \"9dc746ad-99ec-4a42-8c05-3c45ece46906\") " pod="openstack/tempest-tests-tempest" Dec 05 13:16:41 crc kubenswrapper[4784]: I1205 13:16:41.361012 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 05 13:16:41 crc kubenswrapper[4784]: I1205 13:16:41.834175 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 05 13:16:41 crc kubenswrapper[4784]: W1205 13:16:41.837343 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9dc746ad_99ec_4a42_8c05_3c45ece46906.slice/crio-5158ffe5a4ff0a32ac0f53d87d83e1eeea879357b9bbd2d8df9a0602dfe78ddf WatchSource:0}: Error finding container 5158ffe5a4ff0a32ac0f53d87d83e1eeea879357b9bbd2d8df9a0602dfe78ddf: Status 404 returned error can't find the container with id 5158ffe5a4ff0a32ac0f53d87d83e1eeea879357b9bbd2d8df9a0602dfe78ddf Dec 05 13:16:41 crc kubenswrapper[4784]: I1205 13:16:41.841244 4784 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 13:16:42 crc kubenswrapper[4784]: I1205 13:16:42.357000 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"9dc746ad-99ec-4a42-8c05-3c45ece46906","Type":"ContainerStarted","Data":"5158ffe5a4ff0a32ac0f53d87d83e1eeea879357b9bbd2d8df9a0602dfe78ddf"} Dec 05 13:16:45 crc kubenswrapper[4784]: I1205 13:16:45.999437 4784 scope.go:117] "RemoveContainer" containerID="f945d817e6fa3117971bcb82d086dfe6a7c65c33ba45f235aec8fd289f80bd9f" Dec 05 13:16:46 crc kubenswrapper[4784]: E1205 13:16:46.000322 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:16:53 crc kubenswrapper[4784]: I1205 13:16:53.522117 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"9dc746ad-99ec-4a42-8c05-3c45ece46906","Type":"ContainerStarted","Data":"5280dcdd33903dc7d70c6b29f825f3947fbcb9e708ec9417504fc766659a6481"} Dec 05 13:16:53 crc kubenswrapper[4784]: I1205 13:16:53.558386 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" 
podStartSLOduration=4.195146063 podStartE2EDuration="14.55836499s" podCreationTimestamp="2025-12-05 13:16:39 +0000 UTC" firstStartedPulling="2025-12-05 13:16:41.841044049 +0000 UTC m=+3081.261110854" lastFinishedPulling="2025-12-05 13:16:52.204262946 +0000 UTC m=+3091.624329781" observedRunningTime="2025-12-05 13:16:53.550106552 +0000 UTC m=+3092.970173377" watchObservedRunningTime="2025-12-05 13:16:53.55836499 +0000 UTC m=+3092.978431815" Dec 05 13:17:01 crc kubenswrapper[4784]: I1205 13:17:01.007684 4784 scope.go:117] "RemoveContainer" containerID="f945d817e6fa3117971bcb82d086dfe6a7c65c33ba45f235aec8fd289f80bd9f" Dec 05 13:17:01 crc kubenswrapper[4784]: E1205 13:17:01.008592 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:17:14 crc kubenswrapper[4784]: I1205 13:17:13.999181 4784 scope.go:117] "RemoveContainer" containerID="f945d817e6fa3117971bcb82d086dfe6a7c65c33ba45f235aec8fd289f80bd9f" Dec 05 13:17:14 crc kubenswrapper[4784]: E1205 13:17:13.999934 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:17:29 crc kubenswrapper[4784]: I1205 13:17:28.999380 4784 scope.go:117] "RemoveContainer" containerID="f945d817e6fa3117971bcb82d086dfe6a7c65c33ba45f235aec8fd289f80bd9f" Dec 05 13:17:29 crc kubenswrapper[4784]: E1205 13:17:29.000509 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:17:41 crc kubenswrapper[4784]: I1205 13:17:41.013522 4784 scope.go:117] "RemoveContainer" containerID="f945d817e6fa3117971bcb82d086dfe6a7c65c33ba45f235aec8fd289f80bd9f" Dec 05 13:17:41 crc kubenswrapper[4784]: E1205 13:17:41.014970 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:17:54 crc kubenswrapper[4784]: I1205 13:17:53.999953 4784 scope.go:117] "RemoveContainer" containerID="f945d817e6fa3117971bcb82d086dfe6a7c65c33ba45f235aec8fd289f80bd9f" Dec 05 13:17:54 crc kubenswrapper[4784]: E1205 13:17:54.000913 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:18:07 crc kubenswrapper[4784]: I1205 13:18:06.999565 4784 scope.go:117] "RemoveContainer" containerID="f945d817e6fa3117971bcb82d086dfe6a7c65c33ba45f235aec8fd289f80bd9f" Dec 05 13:18:07 crc kubenswrapper[4784]: E1205 13:18:07.000527 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:18:21 crc kubenswrapper[4784]: I1205 13:18:21.009406 4784 scope.go:117] "RemoveContainer" containerID="f945d817e6fa3117971bcb82d086dfe6a7c65c33ba45f235aec8fd289f80bd9f" Dec 05 13:18:21 crc kubenswrapper[4784]: E1205 13:18:21.010414 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:18:33 crc kubenswrapper[4784]: I1205 13:18:32.999683 4784 scope.go:117] "RemoveContainer" containerID="f945d817e6fa3117971bcb82d086dfe6a7c65c33ba45f235aec8fd289f80bd9f" Dec 05 13:18:33 crc kubenswrapper[4784]: E1205 13:18:33.000375 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:18:47 crc kubenswrapper[4784]: I1205 13:18:47.998826 4784 scope.go:117] "RemoveContainer" containerID="f945d817e6fa3117971bcb82d086dfe6a7c65c33ba45f235aec8fd289f80bd9f" Dec 05 13:18:48 crc kubenswrapper[4784]: E1205 13:18:47.999655 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:19:00 crc kubenswrapper[4784]: I1205 13:18:59.999537 4784 scope.go:117] "RemoveContainer" containerID="f945d817e6fa3117971bcb82d086dfe6a7c65c33ba45f235aec8fd289f80bd9f" Dec 05 13:19:00 crc kubenswrapper[4784]: I1205 13:19:00.367658 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerStarted","Data":"237c38110011a6a9052c44b5205631856f885090c2cf396eafbe5c5b756e9b09"} Dec 05 13:20:23 crc kubenswrapper[4784]: I1205 13:20:23.301264 4784 kubelet.go:2421] "SyncLoop 
ADD" source="api" pods=["openshift-marketplace/community-operators-krkcd"] Dec 05 13:20:23 crc kubenswrapper[4784]: I1205 13:20:23.306023 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-krkcd" Dec 05 13:20:23 crc kubenswrapper[4784]: I1205 13:20:23.334805 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-crfr4\" (UniqueName: \"kubernetes.io/projected/d1b64f84-8012-4d81-9e46-667e3d83412b-kube-api-access-crfr4\") pod \"community-operators-krkcd\" (UID: \"d1b64f84-8012-4d81-9e46-667e3d83412b\") " pod="openshift-marketplace/community-operators-krkcd" Dec 05 13:20:23 crc kubenswrapper[4784]: I1205 13:20:23.335071 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1b64f84-8012-4d81-9e46-667e3d83412b-utilities\") pod \"community-operators-krkcd\" (UID: \"d1b64f84-8012-4d81-9e46-667e3d83412b\") " pod="openshift-marketplace/community-operators-krkcd" Dec 05 13:20:23 crc kubenswrapper[4784]: I1205 13:20:23.335261 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1b64f84-8012-4d81-9e46-667e3d83412b-catalog-content\") pod \"community-operators-krkcd\" (UID: \"d1b64f84-8012-4d81-9e46-667e3d83412b\") " pod="openshift-marketplace/community-operators-krkcd" Dec 05 13:20:23 crc kubenswrapper[4784]: I1205 13:20:23.335826 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-krkcd"] Dec 05 13:20:23 crc kubenswrapper[4784]: I1205 13:20:23.436048 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-crfr4\" (UniqueName: \"kubernetes.io/projected/d1b64f84-8012-4d81-9e46-667e3d83412b-kube-api-access-crfr4\") pod \"community-operators-krkcd\" (UID: \"d1b64f84-8012-4d81-9e46-667e3d83412b\") " pod="openshift-marketplace/community-operators-krkcd" Dec 05 13:20:23 crc kubenswrapper[4784]: I1205 13:20:23.436156 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1b64f84-8012-4d81-9e46-667e3d83412b-utilities\") pod \"community-operators-krkcd\" (UID: \"d1b64f84-8012-4d81-9e46-667e3d83412b\") " pod="openshift-marketplace/community-operators-krkcd" Dec 05 13:20:23 crc kubenswrapper[4784]: I1205 13:20:23.436260 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1b64f84-8012-4d81-9e46-667e3d83412b-catalog-content\") pod \"community-operators-krkcd\" (UID: \"d1b64f84-8012-4d81-9e46-667e3d83412b\") " pod="openshift-marketplace/community-operators-krkcd" Dec 05 13:20:23 crc kubenswrapper[4784]: I1205 13:20:23.436889 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1b64f84-8012-4d81-9e46-667e3d83412b-catalog-content\") pod \"community-operators-krkcd\" (UID: \"d1b64f84-8012-4d81-9e46-667e3d83412b\") " pod="openshift-marketplace/community-operators-krkcd" Dec 05 13:20:23 crc kubenswrapper[4784]: I1205 13:20:23.436908 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1b64f84-8012-4d81-9e46-667e3d83412b-utilities\") pod \"community-operators-krkcd\" (UID: 
\"d1b64f84-8012-4d81-9e46-667e3d83412b\") " pod="openshift-marketplace/community-operators-krkcd" Dec 05 13:20:23 crc kubenswrapper[4784]: I1205 13:20:23.457120 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-crfr4\" (UniqueName: \"kubernetes.io/projected/d1b64f84-8012-4d81-9e46-667e3d83412b-kube-api-access-crfr4\") pod \"community-operators-krkcd\" (UID: \"d1b64f84-8012-4d81-9e46-667e3d83412b\") " pod="openshift-marketplace/community-operators-krkcd" Dec 05 13:20:23 crc kubenswrapper[4784]: I1205 13:20:23.632530 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-krkcd" Dec 05 13:20:24 crc kubenswrapper[4784]: I1205 13:20:24.403525 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-krkcd"] Dec 05 13:20:24 crc kubenswrapper[4784]: I1205 13:20:24.488614 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-krkcd" event={"ID":"d1b64f84-8012-4d81-9e46-667e3d83412b","Type":"ContainerStarted","Data":"acd981ca1589e8159c0f9e90e544ce46def0db976f15aef8529c513f2f40e129"} Dec 05 13:20:25 crc kubenswrapper[4784]: I1205 13:20:25.499482 4784 generic.go:334] "Generic (PLEG): container finished" podID="d1b64f84-8012-4d81-9e46-667e3d83412b" containerID="122ca04cfd5819c6d3dd7f3cc8d8a746913ffc598acba80c68c04b4a5d93ac09" exitCode=0 Dec 05 13:20:25 crc kubenswrapper[4784]: I1205 13:20:25.499790 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-krkcd" event={"ID":"d1b64f84-8012-4d81-9e46-667e3d83412b","Type":"ContainerDied","Data":"122ca04cfd5819c6d3dd7f3cc8d8a746913ffc598acba80c68c04b4a5d93ac09"} Dec 05 13:20:27 crc kubenswrapper[4784]: I1205 13:20:27.520147 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-krkcd" event={"ID":"d1b64f84-8012-4d81-9e46-667e3d83412b","Type":"ContainerStarted","Data":"f61772e8e8a7e641fe54dff5e54bb12a69659c9900dd9e8d6001b07978696e41"} Dec 05 13:20:29 crc kubenswrapper[4784]: I1205 13:20:29.546759 4784 generic.go:334] "Generic (PLEG): container finished" podID="d1b64f84-8012-4d81-9e46-667e3d83412b" containerID="f61772e8e8a7e641fe54dff5e54bb12a69659c9900dd9e8d6001b07978696e41" exitCode=0 Dec 05 13:20:29 crc kubenswrapper[4784]: I1205 13:20:29.547224 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-krkcd" event={"ID":"d1b64f84-8012-4d81-9e46-667e3d83412b","Type":"ContainerDied","Data":"f61772e8e8a7e641fe54dff5e54bb12a69659c9900dd9e8d6001b07978696e41"} Dec 05 13:20:33 crc kubenswrapper[4784]: I1205 13:20:33.594641 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-krkcd" event={"ID":"d1b64f84-8012-4d81-9e46-667e3d83412b","Type":"ContainerStarted","Data":"30922b19d265ed6002fc7d2350a9649c9689a24c1e5873d3b02bc8e532d0f903"} Dec 05 13:20:33 crc kubenswrapper[4784]: I1205 13:20:33.613765 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-krkcd" podStartSLOduration=3.299486805 podStartE2EDuration="10.613749582s" podCreationTimestamp="2025-12-05 13:20:23 +0000 UTC" firstStartedPulling="2025-12-05 13:20:25.501884266 +0000 UTC m=+3304.921951081" lastFinishedPulling="2025-12-05 13:20:32.816147053 +0000 UTC m=+3312.236213858" observedRunningTime="2025-12-05 13:20:33.611379908 +0000 UTC m=+3313.031446723" 
watchObservedRunningTime="2025-12-05 13:20:33.613749582 +0000 UTC m=+3313.033816397" Dec 05 13:20:33 crc kubenswrapper[4784]: I1205 13:20:33.634343 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-krkcd" Dec 05 13:20:33 crc kubenswrapper[4784]: I1205 13:20:33.634446 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-krkcd" Dec 05 13:20:34 crc kubenswrapper[4784]: I1205 13:20:34.686693 4784 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-krkcd" podUID="d1b64f84-8012-4d81-9e46-667e3d83412b" containerName="registry-server" probeResult="failure" output=< Dec 05 13:20:34 crc kubenswrapper[4784]: timeout: failed to connect service ":50051" within 1s Dec 05 13:20:34 crc kubenswrapper[4784]: > Dec 05 13:20:43 crc kubenswrapper[4784]: I1205 13:20:43.690446 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-krkcd" Dec 05 13:20:43 crc kubenswrapper[4784]: I1205 13:20:43.754719 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-krkcd" Dec 05 13:20:43 crc kubenswrapper[4784]: I1205 13:20:43.927424 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-krkcd"] Dec 05 13:20:45 crc kubenswrapper[4784]: I1205 13:20:45.716281 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-krkcd" podUID="d1b64f84-8012-4d81-9e46-667e3d83412b" containerName="registry-server" containerID="cri-o://30922b19d265ed6002fc7d2350a9649c9689a24c1e5873d3b02bc8e532d0f903" gracePeriod=2 Dec 05 13:20:46 crc kubenswrapper[4784]: I1205 13:20:46.300442 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-krkcd" Dec 05 13:20:46 crc kubenswrapper[4784]: I1205 13:20:46.424861 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-crfr4\" (UniqueName: \"kubernetes.io/projected/d1b64f84-8012-4d81-9e46-667e3d83412b-kube-api-access-crfr4\") pod \"d1b64f84-8012-4d81-9e46-667e3d83412b\" (UID: \"d1b64f84-8012-4d81-9e46-667e3d83412b\") " Dec 05 13:20:46 crc kubenswrapper[4784]: I1205 13:20:46.424958 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1b64f84-8012-4d81-9e46-667e3d83412b-catalog-content\") pod \"d1b64f84-8012-4d81-9e46-667e3d83412b\" (UID: \"d1b64f84-8012-4d81-9e46-667e3d83412b\") " Dec 05 13:20:46 crc kubenswrapper[4784]: I1205 13:20:46.425009 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1b64f84-8012-4d81-9e46-667e3d83412b-utilities\") pod \"d1b64f84-8012-4d81-9e46-667e3d83412b\" (UID: \"d1b64f84-8012-4d81-9e46-667e3d83412b\") " Dec 05 13:20:46 crc kubenswrapper[4784]: I1205 13:20:46.426287 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d1b64f84-8012-4d81-9e46-667e3d83412b-utilities" (OuterVolumeSpecName: "utilities") pod "d1b64f84-8012-4d81-9e46-667e3d83412b" (UID: "d1b64f84-8012-4d81-9e46-667e3d83412b"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:20:46 crc kubenswrapper[4784]: I1205 13:20:46.441511 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d1b64f84-8012-4d81-9e46-667e3d83412b-kube-api-access-crfr4" (OuterVolumeSpecName: "kube-api-access-crfr4") pod "d1b64f84-8012-4d81-9e46-667e3d83412b" (UID: "d1b64f84-8012-4d81-9e46-667e3d83412b"). InnerVolumeSpecName "kube-api-access-crfr4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:20:46 crc kubenswrapper[4784]: I1205 13:20:46.480094 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d1b64f84-8012-4d81-9e46-667e3d83412b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d1b64f84-8012-4d81-9e46-667e3d83412b" (UID: "d1b64f84-8012-4d81-9e46-667e3d83412b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:20:46 crc kubenswrapper[4784]: I1205 13:20:46.527302 4784 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1b64f84-8012-4d81-9e46-667e3d83412b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 13:20:46 crc kubenswrapper[4784]: I1205 13:20:46.527344 4784 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1b64f84-8012-4d81-9e46-667e3d83412b-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 13:20:46 crc kubenswrapper[4784]: I1205 13:20:46.527360 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-crfr4\" (UniqueName: \"kubernetes.io/projected/d1b64f84-8012-4d81-9e46-667e3d83412b-kube-api-access-crfr4\") on node \"crc\" DevicePath \"\"" Dec 05 13:20:46 crc kubenswrapper[4784]: I1205 13:20:46.731671 4784 generic.go:334] "Generic (PLEG): container finished" podID="d1b64f84-8012-4d81-9e46-667e3d83412b" containerID="30922b19d265ed6002fc7d2350a9649c9689a24c1e5873d3b02bc8e532d0f903" exitCode=0 Dec 05 13:20:46 crc kubenswrapper[4784]: I1205 13:20:46.731735 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-krkcd" Dec 05 13:20:46 crc kubenswrapper[4784]: I1205 13:20:46.731755 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-krkcd" event={"ID":"d1b64f84-8012-4d81-9e46-667e3d83412b","Type":"ContainerDied","Data":"30922b19d265ed6002fc7d2350a9649c9689a24c1e5873d3b02bc8e532d0f903"} Dec 05 13:20:46 crc kubenswrapper[4784]: I1205 13:20:46.732173 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-krkcd" event={"ID":"d1b64f84-8012-4d81-9e46-667e3d83412b","Type":"ContainerDied","Data":"acd981ca1589e8159c0f9e90e544ce46def0db976f15aef8529c513f2f40e129"} Dec 05 13:20:46 crc kubenswrapper[4784]: I1205 13:20:46.732220 4784 scope.go:117] "RemoveContainer" containerID="30922b19d265ed6002fc7d2350a9649c9689a24c1e5873d3b02bc8e532d0f903" Dec 05 13:20:46 crc kubenswrapper[4784]: I1205 13:20:46.753478 4784 scope.go:117] "RemoveContainer" containerID="f61772e8e8a7e641fe54dff5e54bb12a69659c9900dd9e8d6001b07978696e41" Dec 05 13:20:46 crc kubenswrapper[4784]: I1205 13:20:46.780767 4784 scope.go:117] "RemoveContainer" containerID="122ca04cfd5819c6d3dd7f3cc8d8a746913ffc598acba80c68c04b4a5d93ac09" Dec 05 13:20:46 crc kubenswrapper[4784]: I1205 13:20:46.792172 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-krkcd"] Dec 05 13:20:46 crc kubenswrapper[4784]: I1205 13:20:46.805775 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-krkcd"] Dec 05 13:20:46 crc kubenswrapper[4784]: I1205 13:20:46.825839 4784 scope.go:117] "RemoveContainer" containerID="30922b19d265ed6002fc7d2350a9649c9689a24c1e5873d3b02bc8e532d0f903" Dec 05 13:20:46 crc kubenswrapper[4784]: E1205 13:20:46.826300 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"30922b19d265ed6002fc7d2350a9649c9689a24c1e5873d3b02bc8e532d0f903\": container with ID starting with 30922b19d265ed6002fc7d2350a9649c9689a24c1e5873d3b02bc8e532d0f903 not found: ID does not exist" containerID="30922b19d265ed6002fc7d2350a9649c9689a24c1e5873d3b02bc8e532d0f903" Dec 05 13:20:46 crc kubenswrapper[4784]: I1205 13:20:46.826352 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"30922b19d265ed6002fc7d2350a9649c9689a24c1e5873d3b02bc8e532d0f903"} err="failed to get container status \"30922b19d265ed6002fc7d2350a9649c9689a24c1e5873d3b02bc8e532d0f903\": rpc error: code = NotFound desc = could not find container \"30922b19d265ed6002fc7d2350a9649c9689a24c1e5873d3b02bc8e532d0f903\": container with ID starting with 30922b19d265ed6002fc7d2350a9649c9689a24c1e5873d3b02bc8e532d0f903 not found: ID does not exist" Dec 05 13:20:46 crc kubenswrapper[4784]: I1205 13:20:46.826388 4784 scope.go:117] "RemoveContainer" containerID="f61772e8e8a7e641fe54dff5e54bb12a69659c9900dd9e8d6001b07978696e41" Dec 05 13:20:46 crc kubenswrapper[4784]: E1205 13:20:46.826830 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f61772e8e8a7e641fe54dff5e54bb12a69659c9900dd9e8d6001b07978696e41\": container with ID starting with f61772e8e8a7e641fe54dff5e54bb12a69659c9900dd9e8d6001b07978696e41 not found: ID does not exist" containerID="f61772e8e8a7e641fe54dff5e54bb12a69659c9900dd9e8d6001b07978696e41" Dec 05 13:20:46 crc kubenswrapper[4784]: I1205 13:20:46.826860 4784 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f61772e8e8a7e641fe54dff5e54bb12a69659c9900dd9e8d6001b07978696e41"} err="failed to get container status \"f61772e8e8a7e641fe54dff5e54bb12a69659c9900dd9e8d6001b07978696e41\": rpc error: code = NotFound desc = could not find container \"f61772e8e8a7e641fe54dff5e54bb12a69659c9900dd9e8d6001b07978696e41\": container with ID starting with f61772e8e8a7e641fe54dff5e54bb12a69659c9900dd9e8d6001b07978696e41 not found: ID does not exist" Dec 05 13:20:46 crc kubenswrapper[4784]: I1205 13:20:46.826877 4784 scope.go:117] "RemoveContainer" containerID="122ca04cfd5819c6d3dd7f3cc8d8a746913ffc598acba80c68c04b4a5d93ac09" Dec 05 13:20:46 crc kubenswrapper[4784]: E1205 13:20:46.827127 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"122ca04cfd5819c6d3dd7f3cc8d8a746913ffc598acba80c68c04b4a5d93ac09\": container with ID starting with 122ca04cfd5819c6d3dd7f3cc8d8a746913ffc598acba80c68c04b4a5d93ac09 not found: ID does not exist" containerID="122ca04cfd5819c6d3dd7f3cc8d8a746913ffc598acba80c68c04b4a5d93ac09" Dec 05 13:20:46 crc kubenswrapper[4784]: I1205 13:20:46.827153 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"122ca04cfd5819c6d3dd7f3cc8d8a746913ffc598acba80c68c04b4a5d93ac09"} err="failed to get container status \"122ca04cfd5819c6d3dd7f3cc8d8a746913ffc598acba80c68c04b4a5d93ac09\": rpc error: code = NotFound desc = could not find container \"122ca04cfd5819c6d3dd7f3cc8d8a746913ffc598acba80c68c04b4a5d93ac09\": container with ID starting with 122ca04cfd5819c6d3dd7f3cc8d8a746913ffc598acba80c68c04b4a5d93ac09 not found: ID does not exist" Dec 05 13:20:47 crc kubenswrapper[4784]: I1205 13:20:47.010059 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d1b64f84-8012-4d81-9e46-667e3d83412b" path="/var/lib/kubelet/pods/d1b64f84-8012-4d81-9e46-667e3d83412b/volumes" Dec 05 13:21:06 crc kubenswrapper[4784]: I1205 13:21:06.024545 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-q8hxd"] Dec 05 13:21:06 crc kubenswrapper[4784]: E1205 13:21:06.025552 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1b64f84-8012-4d81-9e46-667e3d83412b" containerName="extract-utilities" Dec 05 13:21:06 crc kubenswrapper[4784]: I1205 13:21:06.025566 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1b64f84-8012-4d81-9e46-667e3d83412b" containerName="extract-utilities" Dec 05 13:21:06 crc kubenswrapper[4784]: E1205 13:21:06.025589 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1b64f84-8012-4d81-9e46-667e3d83412b" containerName="extract-content" Dec 05 13:21:06 crc kubenswrapper[4784]: I1205 13:21:06.025594 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1b64f84-8012-4d81-9e46-667e3d83412b" containerName="extract-content" Dec 05 13:21:06 crc kubenswrapper[4784]: E1205 13:21:06.025610 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1b64f84-8012-4d81-9e46-667e3d83412b" containerName="registry-server" Dec 05 13:21:06 crc kubenswrapper[4784]: I1205 13:21:06.025617 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1b64f84-8012-4d81-9e46-667e3d83412b" containerName="registry-server" Dec 05 13:21:06 crc kubenswrapper[4784]: I1205 13:21:06.025928 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1b64f84-8012-4d81-9e46-667e3d83412b" 
containerName="registry-server" Dec 05 13:21:06 crc kubenswrapper[4784]: I1205 13:21:06.028824 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q8hxd" Dec 05 13:21:06 crc kubenswrapper[4784]: I1205 13:21:06.052652 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-q8hxd"] Dec 05 13:21:06 crc kubenswrapper[4784]: I1205 13:21:06.100122 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8a87e64-05c1-473f-9836-83270764695f-catalog-content\") pod \"redhat-marketplace-q8hxd\" (UID: \"b8a87e64-05c1-473f-9836-83270764695f\") " pod="openshift-marketplace/redhat-marketplace-q8hxd" Dec 05 13:21:06 crc kubenswrapper[4784]: I1205 13:21:06.100384 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jz4wt\" (UniqueName: \"kubernetes.io/projected/b8a87e64-05c1-473f-9836-83270764695f-kube-api-access-jz4wt\") pod \"redhat-marketplace-q8hxd\" (UID: \"b8a87e64-05c1-473f-9836-83270764695f\") " pod="openshift-marketplace/redhat-marketplace-q8hxd" Dec 05 13:21:06 crc kubenswrapper[4784]: I1205 13:21:06.100834 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8a87e64-05c1-473f-9836-83270764695f-utilities\") pod \"redhat-marketplace-q8hxd\" (UID: \"b8a87e64-05c1-473f-9836-83270764695f\") " pod="openshift-marketplace/redhat-marketplace-q8hxd" Dec 05 13:21:06 crc kubenswrapper[4784]: I1205 13:21:06.202534 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8a87e64-05c1-473f-9836-83270764695f-catalog-content\") pod \"redhat-marketplace-q8hxd\" (UID: \"b8a87e64-05c1-473f-9836-83270764695f\") " pod="openshift-marketplace/redhat-marketplace-q8hxd" Dec 05 13:21:06 crc kubenswrapper[4784]: I1205 13:21:06.202664 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jz4wt\" (UniqueName: \"kubernetes.io/projected/b8a87e64-05c1-473f-9836-83270764695f-kube-api-access-jz4wt\") pod \"redhat-marketplace-q8hxd\" (UID: \"b8a87e64-05c1-473f-9836-83270764695f\") " pod="openshift-marketplace/redhat-marketplace-q8hxd" Dec 05 13:21:06 crc kubenswrapper[4784]: I1205 13:21:06.202789 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8a87e64-05c1-473f-9836-83270764695f-utilities\") pod \"redhat-marketplace-q8hxd\" (UID: \"b8a87e64-05c1-473f-9836-83270764695f\") " pod="openshift-marketplace/redhat-marketplace-q8hxd" Dec 05 13:21:06 crc kubenswrapper[4784]: I1205 13:21:06.203108 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8a87e64-05c1-473f-9836-83270764695f-catalog-content\") pod \"redhat-marketplace-q8hxd\" (UID: \"b8a87e64-05c1-473f-9836-83270764695f\") " pod="openshift-marketplace/redhat-marketplace-q8hxd" Dec 05 13:21:06 crc kubenswrapper[4784]: I1205 13:21:06.203335 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8a87e64-05c1-473f-9836-83270764695f-utilities\") pod \"redhat-marketplace-q8hxd\" (UID: \"b8a87e64-05c1-473f-9836-83270764695f\") " 
pod="openshift-marketplace/redhat-marketplace-q8hxd" Dec 05 13:21:06 crc kubenswrapper[4784]: I1205 13:21:06.227122 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jz4wt\" (UniqueName: \"kubernetes.io/projected/b8a87e64-05c1-473f-9836-83270764695f-kube-api-access-jz4wt\") pod \"redhat-marketplace-q8hxd\" (UID: \"b8a87e64-05c1-473f-9836-83270764695f\") " pod="openshift-marketplace/redhat-marketplace-q8hxd" Dec 05 13:21:06 crc kubenswrapper[4784]: I1205 13:21:06.357913 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q8hxd" Dec 05 13:21:07 crc kubenswrapper[4784]: I1205 13:21:07.027780 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-q8hxd"] Dec 05 13:21:07 crc kubenswrapper[4784]: I1205 13:21:07.940060 4784 generic.go:334] "Generic (PLEG): container finished" podID="b8a87e64-05c1-473f-9836-83270764695f" containerID="7ce5f7a401f35366ee185a498fb50043e39e99c66afed10b58e4b70582d1d8bf" exitCode=0 Dec 05 13:21:07 crc kubenswrapper[4784]: I1205 13:21:07.940126 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q8hxd" event={"ID":"b8a87e64-05c1-473f-9836-83270764695f","Type":"ContainerDied","Data":"7ce5f7a401f35366ee185a498fb50043e39e99c66afed10b58e4b70582d1d8bf"} Dec 05 13:21:07 crc kubenswrapper[4784]: I1205 13:21:07.940696 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q8hxd" event={"ID":"b8a87e64-05c1-473f-9836-83270764695f","Type":"ContainerStarted","Data":"634bbfa961202b6d408fce6e34fd3db228f8938cf38479364281a3f1891d4eb1"} Dec 05 13:21:08 crc kubenswrapper[4784]: I1205 13:21:08.955537 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q8hxd" event={"ID":"b8a87e64-05c1-473f-9836-83270764695f","Type":"ContainerStarted","Data":"6baa648005347c689718add5b33427ec3f2bb49713409037962ddca2b34fb201"} Dec 05 13:21:09 crc kubenswrapper[4784]: I1205 13:21:09.972574 4784 generic.go:334] "Generic (PLEG): container finished" podID="b8a87e64-05c1-473f-9836-83270764695f" containerID="6baa648005347c689718add5b33427ec3f2bb49713409037962ddca2b34fb201" exitCode=0 Dec 05 13:21:09 crc kubenswrapper[4784]: I1205 13:21:09.972623 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q8hxd" event={"ID":"b8a87e64-05c1-473f-9836-83270764695f","Type":"ContainerDied","Data":"6baa648005347c689718add5b33427ec3f2bb49713409037962ddca2b34fb201"} Dec 05 13:21:10 crc kubenswrapper[4784]: I1205 13:21:10.987559 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q8hxd" event={"ID":"b8a87e64-05c1-473f-9836-83270764695f","Type":"ContainerStarted","Data":"947839b22b4d0858c1cb2f53513aafcf9fde5b4ba4dea572551878a96bcdd9d8"} Dec 05 13:21:11 crc kubenswrapper[4784]: I1205 13:21:11.020759 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-q8hxd" podStartSLOduration=3.611640758 podStartE2EDuration="6.020735933s" podCreationTimestamp="2025-12-05 13:21:05 +0000 UTC" firstStartedPulling="2025-12-05 13:21:07.943227381 +0000 UTC m=+3347.363294216" lastFinishedPulling="2025-12-05 13:21:10.352322556 +0000 UTC m=+3349.772389391" observedRunningTime="2025-12-05 13:21:11.007773469 +0000 UTC m=+3350.427840294" watchObservedRunningTime="2025-12-05 13:21:11.020735933 +0000 UTC 
m=+3350.440802768" Dec 05 13:21:16 crc kubenswrapper[4784]: I1205 13:21:16.358845 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-q8hxd" Dec 05 13:21:16 crc kubenswrapper[4784]: I1205 13:21:16.359453 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-q8hxd" Dec 05 13:21:16 crc kubenswrapper[4784]: I1205 13:21:16.417363 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-q8hxd" Dec 05 13:21:17 crc kubenswrapper[4784]: I1205 13:21:17.094267 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-q8hxd" Dec 05 13:21:17 crc kubenswrapper[4784]: I1205 13:21:17.140042 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-q8hxd"] Dec 05 13:21:19 crc kubenswrapper[4784]: I1205 13:21:19.064141 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-q8hxd" podUID="b8a87e64-05c1-473f-9836-83270764695f" containerName="registry-server" containerID="cri-o://947839b22b4d0858c1cb2f53513aafcf9fde5b4ba4dea572551878a96bcdd9d8" gracePeriod=2 Dec 05 13:21:19 crc kubenswrapper[4784]: I1205 13:21:19.533320 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q8hxd" Dec 05 13:21:19 crc kubenswrapper[4784]: I1205 13:21:19.603421 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8a87e64-05c1-473f-9836-83270764695f-utilities\") pod \"b8a87e64-05c1-473f-9836-83270764695f\" (UID: \"b8a87e64-05c1-473f-9836-83270764695f\") " Dec 05 13:21:19 crc kubenswrapper[4784]: I1205 13:21:19.603779 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jz4wt\" (UniqueName: \"kubernetes.io/projected/b8a87e64-05c1-473f-9836-83270764695f-kube-api-access-jz4wt\") pod \"b8a87e64-05c1-473f-9836-83270764695f\" (UID: \"b8a87e64-05c1-473f-9836-83270764695f\") " Dec 05 13:21:19 crc kubenswrapper[4784]: I1205 13:21:19.603990 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8a87e64-05c1-473f-9836-83270764695f-catalog-content\") pod \"b8a87e64-05c1-473f-9836-83270764695f\" (UID: \"b8a87e64-05c1-473f-9836-83270764695f\") " Dec 05 13:21:19 crc kubenswrapper[4784]: I1205 13:21:19.604310 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8a87e64-05c1-473f-9836-83270764695f-utilities" (OuterVolumeSpecName: "utilities") pod "b8a87e64-05c1-473f-9836-83270764695f" (UID: "b8a87e64-05c1-473f-9836-83270764695f"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:21:19 crc kubenswrapper[4784]: I1205 13:21:19.604704 4784 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8a87e64-05c1-473f-9836-83270764695f-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 13:21:19 crc kubenswrapper[4784]: I1205 13:21:19.611742 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8a87e64-05c1-473f-9836-83270764695f-kube-api-access-jz4wt" (OuterVolumeSpecName: "kube-api-access-jz4wt") pod "b8a87e64-05c1-473f-9836-83270764695f" (UID: "b8a87e64-05c1-473f-9836-83270764695f"). InnerVolumeSpecName "kube-api-access-jz4wt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:21:19 crc kubenswrapper[4784]: I1205 13:21:19.634348 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8a87e64-05c1-473f-9836-83270764695f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b8a87e64-05c1-473f-9836-83270764695f" (UID: "b8a87e64-05c1-473f-9836-83270764695f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:21:19 crc kubenswrapper[4784]: I1205 13:21:19.706768 4784 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8a87e64-05c1-473f-9836-83270764695f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 13:21:19 crc kubenswrapper[4784]: I1205 13:21:19.707095 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jz4wt\" (UniqueName: \"kubernetes.io/projected/b8a87e64-05c1-473f-9836-83270764695f-kube-api-access-jz4wt\") on node \"crc\" DevicePath \"\"" Dec 05 13:21:20 crc kubenswrapper[4784]: I1205 13:21:20.077830 4784 generic.go:334] "Generic (PLEG): container finished" podID="b8a87e64-05c1-473f-9836-83270764695f" containerID="947839b22b4d0858c1cb2f53513aafcf9fde5b4ba4dea572551878a96bcdd9d8" exitCode=0 Dec 05 13:21:20 crc kubenswrapper[4784]: I1205 13:21:20.077880 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q8hxd" event={"ID":"b8a87e64-05c1-473f-9836-83270764695f","Type":"ContainerDied","Data":"947839b22b4d0858c1cb2f53513aafcf9fde5b4ba4dea572551878a96bcdd9d8"} Dec 05 13:21:20 crc kubenswrapper[4784]: I1205 13:21:20.077911 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q8hxd" event={"ID":"b8a87e64-05c1-473f-9836-83270764695f","Type":"ContainerDied","Data":"634bbfa961202b6d408fce6e34fd3db228f8938cf38479364281a3f1891d4eb1"} Dec 05 13:21:20 crc kubenswrapper[4784]: I1205 13:21:20.077929 4784 scope.go:117] "RemoveContainer" containerID="947839b22b4d0858c1cb2f53513aafcf9fde5b4ba4dea572551878a96bcdd9d8" Dec 05 13:21:20 crc kubenswrapper[4784]: I1205 13:21:20.077955 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q8hxd"
Dec 05 13:21:20 crc kubenswrapper[4784]: I1205 13:21:20.114613 4784 scope.go:117] "RemoveContainer" containerID="6baa648005347c689718add5b33427ec3f2bb49713409037962ddca2b34fb201"
Dec 05 13:21:20 crc kubenswrapper[4784]: I1205 13:21:20.135441 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-q8hxd"]
Dec 05 13:21:20 crc kubenswrapper[4784]: I1205 13:21:20.148362 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-q8hxd"]
Dec 05 13:21:20 crc kubenswrapper[4784]: I1205 13:21:20.158990 4784 scope.go:117] "RemoveContainer" containerID="7ce5f7a401f35366ee185a498fb50043e39e99c66afed10b58e4b70582d1d8bf"
Dec 05 13:21:20 crc kubenswrapper[4784]: I1205 13:21:20.229167 4784 scope.go:117] "RemoveContainer" containerID="947839b22b4d0858c1cb2f53513aafcf9fde5b4ba4dea572551878a96bcdd9d8"
Dec 05 13:21:20 crc kubenswrapper[4784]: E1205 13:21:20.229687 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"947839b22b4d0858c1cb2f53513aafcf9fde5b4ba4dea572551878a96bcdd9d8\": container with ID starting with 947839b22b4d0858c1cb2f53513aafcf9fde5b4ba4dea572551878a96bcdd9d8 not found: ID does not exist" containerID="947839b22b4d0858c1cb2f53513aafcf9fde5b4ba4dea572551878a96bcdd9d8"
Dec 05 13:21:20 crc kubenswrapper[4784]: I1205 13:21:20.229725 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"947839b22b4d0858c1cb2f53513aafcf9fde5b4ba4dea572551878a96bcdd9d8"} err="failed to get container status \"947839b22b4d0858c1cb2f53513aafcf9fde5b4ba4dea572551878a96bcdd9d8\": rpc error: code = NotFound desc = could not find container \"947839b22b4d0858c1cb2f53513aafcf9fde5b4ba4dea572551878a96bcdd9d8\": container with ID starting with 947839b22b4d0858c1cb2f53513aafcf9fde5b4ba4dea572551878a96bcdd9d8 not found: ID does not exist"
Dec 05 13:21:20 crc kubenswrapper[4784]: I1205 13:21:20.229751 4784 scope.go:117] "RemoveContainer" containerID="6baa648005347c689718add5b33427ec3f2bb49713409037962ddca2b34fb201"
Dec 05 13:21:20 crc kubenswrapper[4784]: E1205 13:21:20.230169 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6baa648005347c689718add5b33427ec3f2bb49713409037962ddca2b34fb201\": container with ID starting with 6baa648005347c689718add5b33427ec3f2bb49713409037962ddca2b34fb201 not found: ID does not exist" containerID="6baa648005347c689718add5b33427ec3f2bb49713409037962ddca2b34fb201"
Dec 05 13:21:20 crc kubenswrapper[4784]: I1205 13:21:20.230219 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6baa648005347c689718add5b33427ec3f2bb49713409037962ddca2b34fb201"} err="failed to get container status \"6baa648005347c689718add5b33427ec3f2bb49713409037962ddca2b34fb201\": rpc error: code = NotFound desc = could not find container \"6baa648005347c689718add5b33427ec3f2bb49713409037962ddca2b34fb201\": container with ID starting with 6baa648005347c689718add5b33427ec3f2bb49713409037962ddca2b34fb201 not found: ID does not exist"
Dec 05 13:21:20 crc kubenswrapper[4784]: I1205 13:21:20.230246 4784 scope.go:117] "RemoveContainer" containerID="7ce5f7a401f35366ee185a498fb50043e39e99c66afed10b58e4b70582d1d8bf"
Dec 05 13:21:20 crc kubenswrapper[4784]: E1205 13:21:20.230554 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ce5f7a401f35366ee185a498fb50043e39e99c66afed10b58e4b70582d1d8bf\": container with ID starting with 7ce5f7a401f35366ee185a498fb50043e39e99c66afed10b58e4b70582d1d8bf not found: ID does not exist" containerID="7ce5f7a401f35366ee185a498fb50043e39e99c66afed10b58e4b70582d1d8bf"
Dec 05 13:21:20 crc kubenswrapper[4784]: I1205 13:21:20.230585 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ce5f7a401f35366ee185a498fb50043e39e99c66afed10b58e4b70582d1d8bf"} err="failed to get container status \"7ce5f7a401f35366ee185a498fb50043e39e99c66afed10b58e4b70582d1d8bf\": rpc error: code = NotFound desc = could not find container \"7ce5f7a401f35366ee185a498fb50043e39e99c66afed10b58e4b70582d1d8bf\": container with ID starting with 7ce5f7a401f35366ee185a498fb50043e39e99c66afed10b58e4b70582d1d8bf not found: ID does not exist"
Dec 05 13:21:21 crc kubenswrapper[4784]: I1205 13:21:21.010426 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8a87e64-05c1-473f-9836-83270764695f" path="/var/lib/kubelet/pods/b8a87e64-05c1-473f-9836-83270764695f/volumes"
Dec 05 13:21:29 crc kubenswrapper[4784]: I1205 13:21:29.572147 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 13:21:29 crc kubenswrapper[4784]: I1205 13:21:29.572959 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 13:21:59 crc kubenswrapper[4784]: I1205 13:21:59.572479 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 13:21:59 crc kubenswrapper[4784]: I1205 13:21:59.573015 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 13:22:29 crc kubenswrapper[4784]: I1205 13:22:29.572730 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 13:22:29 crc kubenswrapper[4784]: I1205 13:22:29.574422 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 13:22:29 crc kubenswrapper[4784]: I1205 13:22:29.574481 4784 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm"
Dec 05 13:22:29 crc kubenswrapper[4784]: I1205 13:22:29.575207 4784 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"237c38110011a6a9052c44b5205631856f885090c2cf396eafbe5c5b756e9b09"} pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 13:22:29 crc kubenswrapper[4784]: I1205 13:22:29.575269 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" containerID="cri-o://237c38110011a6a9052c44b5205631856f885090c2cf396eafbe5c5b756e9b09" gracePeriod=600
Dec 05 13:22:30 crc kubenswrapper[4784]: I1205 13:22:30.037834 4784 generic.go:334] "Generic (PLEG): container finished" podID="be412f31-7a36-4811-8914-be8cdc987d08" containerID="237c38110011a6a9052c44b5205631856f885090c2cf396eafbe5c5b756e9b09" exitCode=0
Dec 05 13:22:30 crc kubenswrapper[4784]: I1205 13:22:30.037910 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerDied","Data":"237c38110011a6a9052c44b5205631856f885090c2cf396eafbe5c5b756e9b09"}
Dec 05 13:22:30 crc kubenswrapper[4784]: I1205 13:22:30.038263 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerStarted","Data":"62a6a42d74c4a846333978c8a37da55ff1fd9eead23db76f71b5c7cee7012f21"}
Dec 05 13:22:30 crc kubenswrapper[4784]: I1205 13:22:30.038288 4784 scope.go:117] "RemoveContainer" containerID="f945d817e6fa3117971bcb82d086dfe6a7c65c33ba45f235aec8fd289f80bd9f"
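[editor's note] The entries above show one full liveness-recovery cycle for machine-config-daemon: three failed HTTP probes against http://127.0.0.1:8798/health, a "SyncLoop (probe)" unhealthy event, a kill with the 600 s grace period, and a restart under a new container ID. A minimal sketch of the check the prober is performing, assuming only the endpoint and failure criteria visible in the log (the real prober lives in the kubelet; this is illustrative, not its code):

    package main

    import (
        "fmt"
        "net/http"
        "time"
    )

    // probeOnce issues one HTTP GET the way an httpGet liveness probe does:
    // any transport error (e.g. "connect: connection refused", as above) or
    // a status outside 200-399 counts as a failure.
    func probeOnce(url string, timeout time.Duration) error {
        client := &http.Client{Timeout: timeout}
        resp, err := client.Get(url)
        if err != nil {
            return err
        }
        defer resp.Body.Close()
        if resp.StatusCode < 200 || resp.StatusCode >= 400 {
            return fmt.Errorf("unexpected status %d", resp.StatusCode)
        }
        return nil
    }

    func main() {
        // Endpoint taken from the probe output above.
        if err := probeOnce("http://127.0.0.1:8798/health", time.Second); err != nil {
            fmt.Println("Probe failed:", err)
        } else {
            fmt.Println("Probe succeeded")
        }
    }
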
Dec 05 13:22:52 crc kubenswrapper[4784]: I1205 13:22:52.284865 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-zffmz"]
Dec 05 13:22:52 crc kubenswrapper[4784]: E1205 13:22:52.285923 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8a87e64-05c1-473f-9836-83270764695f" containerName="extract-utilities"
Dec 05 13:22:52 crc kubenswrapper[4784]: I1205 13:22:52.285940 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8a87e64-05c1-473f-9836-83270764695f" containerName="extract-utilities"
Dec 05 13:22:52 crc kubenswrapper[4784]: E1205 13:22:52.285972 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8a87e64-05c1-473f-9836-83270764695f" containerName="extract-content"
Dec 05 13:22:52 crc kubenswrapper[4784]: I1205 13:22:52.285980 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8a87e64-05c1-473f-9836-83270764695f" containerName="extract-content"
Dec 05 13:22:52 crc kubenswrapper[4784]: E1205 13:22:52.285996 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8a87e64-05c1-473f-9836-83270764695f" containerName="registry-server"
Dec 05 13:22:52 crc kubenswrapper[4784]: I1205 13:22:52.286004 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8a87e64-05c1-473f-9836-83270764695f" containerName="registry-server"
Dec 05 13:22:52 crc kubenswrapper[4784]: I1205 13:22:52.286263 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8a87e64-05c1-473f-9836-83270764695f" containerName="registry-server"
Dec 05 13:22:52 crc kubenswrapper[4784]: I1205 13:22:52.287783 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zffmz"
Dec 05 13:22:52 crc kubenswrapper[4784]: I1205 13:22:52.303037 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zffmz"]
Dec 05 13:22:52 crc kubenswrapper[4784]: I1205 13:22:52.319977 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f26612d0-d6ff-40e8-bed9-17638f40072f-catalog-content\") pod \"certified-operators-zffmz\" (UID: \"f26612d0-d6ff-40e8-bed9-17638f40072f\") " pod="openshift-marketplace/certified-operators-zffmz"
Dec 05 13:22:52 crc kubenswrapper[4784]: I1205 13:22:52.320058 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f26612d0-d6ff-40e8-bed9-17638f40072f-utilities\") pod \"certified-operators-zffmz\" (UID: \"f26612d0-d6ff-40e8-bed9-17638f40072f\") " pod="openshift-marketplace/certified-operators-zffmz"
Dec 05 13:22:52 crc kubenswrapper[4784]: I1205 13:22:52.320107 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mvfmp\" (UniqueName: \"kubernetes.io/projected/f26612d0-d6ff-40e8-bed9-17638f40072f-kube-api-access-mvfmp\") pod \"certified-operators-zffmz\" (UID: \"f26612d0-d6ff-40e8-bed9-17638f40072f\") " pod="openshift-marketplace/certified-operators-zffmz"
Dec 05 13:22:52 crc kubenswrapper[4784]: I1205 13:22:52.421926 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f26612d0-d6ff-40e8-bed9-17638f40072f-catalog-content\") pod \"certified-operators-zffmz\" (UID: \"f26612d0-d6ff-40e8-bed9-17638f40072f\") " pod="openshift-marketplace/certified-operators-zffmz"
Dec 05 13:22:52 crc kubenswrapper[4784]: I1205 13:22:52.422001 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f26612d0-d6ff-40e8-bed9-17638f40072f-utilities\") pod \"certified-operators-zffmz\" (UID: \"f26612d0-d6ff-40e8-bed9-17638f40072f\") " pod="openshift-marketplace/certified-operators-zffmz"
Dec 05 13:22:52 crc kubenswrapper[4784]: I1205 13:22:52.422055 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mvfmp\" (UniqueName: \"kubernetes.io/projected/f26612d0-d6ff-40e8-bed9-17638f40072f-kube-api-access-mvfmp\") pod \"certified-operators-zffmz\" (UID: \"f26612d0-d6ff-40e8-bed9-17638f40072f\") " pod="openshift-marketplace/certified-operators-zffmz"
Dec 05 13:22:52 crc kubenswrapper[4784]: I1205 13:22:52.422474 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f26612d0-d6ff-40e8-bed9-17638f40072f-catalog-content\") pod \"certified-operators-zffmz\" (UID: \"f26612d0-d6ff-40e8-bed9-17638f40072f\") " pod="openshift-marketplace/certified-operators-zffmz"
Dec 05 13:22:52 crc kubenswrapper[4784]: I1205 13:22:52.422536 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f26612d0-d6ff-40e8-bed9-17638f40072f-utilities\") pod \"certified-operators-zffmz\" (UID: \"f26612d0-d6ff-40e8-bed9-17638f40072f\") " pod="openshift-marketplace/certified-operators-zffmz"
Dec 05 13:22:52 crc kubenswrapper[4784]: I1205 13:22:52.456379 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mvfmp\" (UniqueName: \"kubernetes.io/projected/f26612d0-d6ff-40e8-bed9-17638f40072f-kube-api-access-mvfmp\") pod \"certified-operators-zffmz\" (UID: \"f26612d0-d6ff-40e8-bed9-17638f40072f\") " pod="openshift-marketplace/certified-operators-zffmz"
Dec 05 13:22:52 crc kubenswrapper[4784]: I1205 13:22:52.619813 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zffmz"
Dec 05 13:22:53 crc kubenswrapper[4784]: I1205 13:22:53.165886 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zffmz"]
Dec 05 13:22:53 crc kubenswrapper[4784]: I1205 13:22:53.309092 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zffmz" event={"ID":"f26612d0-d6ff-40e8-bed9-17638f40072f","Type":"ContainerStarted","Data":"30b584178932b1f4406a7b4233a27cba3a40367a94721a529d77559eb9aee824"}
Dec 05 13:22:54 crc kubenswrapper[4784]: I1205 13:22:54.330672 4784 generic.go:334] "Generic (PLEG): container finished" podID="f26612d0-d6ff-40e8-bed9-17638f40072f" containerID="c57014b839a8c6689922ab64feb2eefbfd53cd418c765c9607da93c1a25ac236" exitCode=0
Dec 05 13:22:54 crc kubenswrapper[4784]: I1205 13:22:54.331537 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zffmz" event={"ID":"f26612d0-d6ff-40e8-bed9-17638f40072f","Type":"ContainerDied","Data":"c57014b839a8c6689922ab64feb2eefbfd53cd418c765c9607da93c1a25ac236"}
Dec 05 13:22:54 crc kubenswrapper[4784]: I1205 13:22:54.334512 4784 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 05 13:22:56 crc kubenswrapper[4784]: I1205 13:22:56.358702 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zffmz" event={"ID":"f26612d0-d6ff-40e8-bed9-17638f40072f","Type":"ContainerStarted","Data":"4c603227ed0e237d32e2fb11bbba76289e98e93983fc5499a9b1036f34827ae4"}
Dec 05 13:22:59 crc kubenswrapper[4784]: I1205 13:22:59.388717 4784 generic.go:334] "Generic (PLEG): container finished" podID="f26612d0-d6ff-40e8-bed9-17638f40072f" containerID="4c603227ed0e237d32e2fb11bbba76289e98e93983fc5499a9b1036f34827ae4" exitCode=0
Dec 05 13:22:59 crc kubenswrapper[4784]: I1205 13:22:59.388759 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zffmz" event={"ID":"f26612d0-d6ff-40e8-bed9-17638f40072f","Type":"ContainerDied","Data":"4c603227ed0e237d32e2fb11bbba76289e98e93983fc5499a9b1036f34827ae4"}
Dec 05 13:23:01 crc kubenswrapper[4784]: I1205 13:23:01.411392 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zffmz" event={"ID":"f26612d0-d6ff-40e8-bed9-17638f40072f","Type":"ContainerStarted","Data":"57801784488fc622d1e9f0f44c3581f55778054d3622e79f5f94cb8f79a4654f"}
Dec 05 13:23:01 crc kubenswrapper[4784]: I1205 13:23:01.435568 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-zffmz" podStartSLOduration=3.272898239 podStartE2EDuration="9.435545997s" podCreationTimestamp="2025-12-05 13:22:52 +0000 UTC" firstStartedPulling="2025-12-05 13:22:54.334044254 +0000 UTC m=+3453.754111069" lastFinishedPulling="2025-12-05 13:23:00.496692002 +0000 UTC m=+3459.916758827" observedRunningTime="2025-12-05 13:23:01.428454007 +0000 UTC m=+3460.848520832" watchObservedRunningTime="2025-12-05 13:23:01.435545997 +0000 UTC m=+3460.855612802"
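[editor's note] The pod_startup_latency_tracker entry above carries two durations for certified-operators-zffmz: podStartE2EDuration (creation at 13:22:52 to observed running at 13:23:01.43, about 9.44 s) and podStartSLOduration (3.27 s), which by the entry's own numbers is the E2E duration minus the image-pull window (firstStartedPulling to lastFinishedPulling, about 6.16 s). A small sketch reproducing that arithmetic from the monotonic m=+ offsets printed in the entry:

    package main

    import "fmt"

    func main() {
        // Monotonic offsets (m=+...) copied from the log entry above.
        const (
            firstStartedPulling = 3453.754111069
            lastFinishedPulling = 3459.916758827
            podStartE2E         = 9.435545997 // creation -> running, printed in the entry
        )
        pull := lastFinishedPulling - firstStartedPulling
        // The SLO duration excludes time spent pulling images.
        slo := podStartE2E - pull
        fmt.Printf("image pull: %.3fs, SLO duration: %.3fs\n", pull, slo) // ~6.163s, ~3.273s
    }
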
Dec 05 13:23:02 crc kubenswrapper[4784]: I1205 13:23:02.619985 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-zffmz"
Dec 05 13:23:02 crc kubenswrapper[4784]: I1205 13:23:02.620329 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-zffmz"
Dec 05 13:23:02 crc kubenswrapper[4784]: I1205 13:23:02.673499 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-zffmz"
Dec 05 13:23:12 crc kubenswrapper[4784]: I1205 13:23:12.680907 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-zffmz"
Dec 05 13:23:12 crc kubenswrapper[4784]: I1205 13:23:12.740062 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zffmz"]
Dec 05 13:23:13 crc kubenswrapper[4784]: I1205 13:23:13.540014 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-zffmz" podUID="f26612d0-d6ff-40e8-bed9-17638f40072f" containerName="registry-server" containerID="cri-o://57801784488fc622d1e9f0f44c3581f55778054d3622e79f5f94cb8f79a4654f" gracePeriod=2
Dec 05 13:23:14 crc kubenswrapper[4784]: I1205 13:23:14.060958 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zffmz"
Dec 05 13:23:14 crc kubenswrapper[4784]: I1205 13:23:14.186783 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f26612d0-d6ff-40e8-bed9-17638f40072f-utilities\") pod \"f26612d0-d6ff-40e8-bed9-17638f40072f\" (UID: \"f26612d0-d6ff-40e8-bed9-17638f40072f\") "
Dec 05 13:23:14 crc kubenswrapper[4784]: I1205 13:23:14.187068 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mvfmp\" (UniqueName: \"kubernetes.io/projected/f26612d0-d6ff-40e8-bed9-17638f40072f-kube-api-access-mvfmp\") pod \"f26612d0-d6ff-40e8-bed9-17638f40072f\" (UID: \"f26612d0-d6ff-40e8-bed9-17638f40072f\") "
Dec 05 13:23:14 crc kubenswrapper[4784]: I1205 13:23:14.187180 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f26612d0-d6ff-40e8-bed9-17638f40072f-catalog-content\") pod \"f26612d0-d6ff-40e8-bed9-17638f40072f\" (UID: \"f26612d0-d6ff-40e8-bed9-17638f40072f\") "
Dec 05 13:23:14 crc kubenswrapper[4784]: I1205 13:23:14.188399 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f26612d0-d6ff-40e8-bed9-17638f40072f-utilities" (OuterVolumeSpecName: "utilities") pod "f26612d0-d6ff-40e8-bed9-17638f40072f" (UID: "f26612d0-d6ff-40e8-bed9-17638f40072f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 13:23:14 crc kubenswrapper[4784]: I1205 13:23:14.196308 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f26612d0-d6ff-40e8-bed9-17638f40072f-kube-api-access-mvfmp" (OuterVolumeSpecName: "kube-api-access-mvfmp") pod "f26612d0-d6ff-40e8-bed9-17638f40072f" (UID: "f26612d0-d6ff-40e8-bed9-17638f40072f"). InnerVolumeSpecName "kube-api-access-mvfmp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 13:23:14 crc kubenswrapper[4784]: I1205 13:23:14.273307 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f26612d0-d6ff-40e8-bed9-17638f40072f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f26612d0-d6ff-40e8-bed9-17638f40072f" (UID: "f26612d0-d6ff-40e8-bed9-17638f40072f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 13:23:14 crc kubenswrapper[4784]: I1205 13:23:14.289290 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mvfmp\" (UniqueName: \"kubernetes.io/projected/f26612d0-d6ff-40e8-bed9-17638f40072f-kube-api-access-mvfmp\") on node \"crc\" DevicePath \"\""
Dec 05 13:23:14 crc kubenswrapper[4784]: I1205 13:23:14.289327 4784 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f26612d0-d6ff-40e8-bed9-17638f40072f-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 13:23:14 crc kubenswrapper[4784]: I1205 13:23:14.289341 4784 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f26612d0-d6ff-40e8-bed9-17638f40072f-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 13:23:14 crc kubenswrapper[4784]: I1205 13:23:14.550478 4784 generic.go:334] "Generic (PLEG): container finished" podID="f26612d0-d6ff-40e8-bed9-17638f40072f" containerID="57801784488fc622d1e9f0f44c3581f55778054d3622e79f5f94cb8f79a4654f" exitCode=0
Dec 05 13:23:14 crc kubenswrapper[4784]: I1205 13:23:14.550514 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zffmz" event={"ID":"f26612d0-d6ff-40e8-bed9-17638f40072f","Type":"ContainerDied","Data":"57801784488fc622d1e9f0f44c3581f55778054d3622e79f5f94cb8f79a4654f"}
Dec 05 13:23:14 crc kubenswrapper[4784]: I1205 13:23:14.550538 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zffmz" event={"ID":"f26612d0-d6ff-40e8-bed9-17638f40072f","Type":"ContainerDied","Data":"30b584178932b1f4406a7b4233a27cba3a40367a94721a529d77559eb9aee824"}
Dec 05 13:23:14 crc kubenswrapper[4784]: I1205 13:23:14.550555 4784 scope.go:117] "RemoveContainer" containerID="57801784488fc622d1e9f0f44c3581f55778054d3622e79f5f94cb8f79a4654f"
Dec 05 13:23:14 crc kubenswrapper[4784]: I1205 13:23:14.550662 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zffmz"
Dec 05 13:23:14 crc kubenswrapper[4784]: I1205 13:23:14.603100 4784 scope.go:117] "RemoveContainer" containerID="4c603227ed0e237d32e2fb11bbba76289e98e93983fc5499a9b1036f34827ae4"
Dec 05 13:23:14 crc kubenswrapper[4784]: I1205 13:23:14.606980 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zffmz"]
Dec 05 13:23:14 crc kubenswrapper[4784]: I1205 13:23:14.621229 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-zffmz"]
Dec 05 13:23:14 crc kubenswrapper[4784]: I1205 13:23:14.648848 4784 scope.go:117] "RemoveContainer" containerID="c57014b839a8c6689922ab64feb2eefbfd53cd418c765c9607da93c1a25ac236"
Dec 05 13:23:14 crc kubenswrapper[4784]: I1205 13:23:14.701467 4784 scope.go:117] "RemoveContainer" containerID="57801784488fc622d1e9f0f44c3581f55778054d3622e79f5f94cb8f79a4654f"
Dec 05 13:23:14 crc kubenswrapper[4784]: E1205 13:23:14.701871 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"57801784488fc622d1e9f0f44c3581f55778054d3622e79f5f94cb8f79a4654f\": container with ID starting with 57801784488fc622d1e9f0f44c3581f55778054d3622e79f5f94cb8f79a4654f not found: ID does not exist" containerID="57801784488fc622d1e9f0f44c3581f55778054d3622e79f5f94cb8f79a4654f"
Dec 05 13:23:14 crc kubenswrapper[4784]: I1205 13:23:14.701905 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57801784488fc622d1e9f0f44c3581f55778054d3622e79f5f94cb8f79a4654f"} err="failed to get container status \"57801784488fc622d1e9f0f44c3581f55778054d3622e79f5f94cb8f79a4654f\": rpc error: code = NotFound desc = could not find container \"57801784488fc622d1e9f0f44c3581f55778054d3622e79f5f94cb8f79a4654f\": container with ID starting with 57801784488fc622d1e9f0f44c3581f55778054d3622e79f5f94cb8f79a4654f not found: ID does not exist"
Dec 05 13:23:14 crc kubenswrapper[4784]: I1205 13:23:14.701926 4784 scope.go:117] "RemoveContainer" containerID="4c603227ed0e237d32e2fb11bbba76289e98e93983fc5499a9b1036f34827ae4"
Dec 05 13:23:14 crc kubenswrapper[4784]: E1205 13:23:14.702225 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4c603227ed0e237d32e2fb11bbba76289e98e93983fc5499a9b1036f34827ae4\": container with ID starting with 4c603227ed0e237d32e2fb11bbba76289e98e93983fc5499a9b1036f34827ae4 not found: ID does not exist" containerID="4c603227ed0e237d32e2fb11bbba76289e98e93983fc5499a9b1036f34827ae4"
Dec 05 13:23:14 crc kubenswrapper[4784]: I1205 13:23:14.702378 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c603227ed0e237d32e2fb11bbba76289e98e93983fc5499a9b1036f34827ae4"} err="failed to get container status \"4c603227ed0e237d32e2fb11bbba76289e98e93983fc5499a9b1036f34827ae4\": rpc error: code = NotFound desc = could not find container \"4c603227ed0e237d32e2fb11bbba76289e98e93983fc5499a9b1036f34827ae4\": container with ID starting with 4c603227ed0e237d32e2fb11bbba76289e98e93983fc5499a9b1036f34827ae4 not found: ID does not exist"
Dec 05 13:23:14 crc kubenswrapper[4784]: I1205 13:23:14.702517 4784 scope.go:117] "RemoveContainer" containerID="c57014b839a8c6689922ab64feb2eefbfd53cd418c765c9607da93c1a25ac236"
Dec 05 13:23:14 crc kubenswrapper[4784]: E1205 13:23:14.702969 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c57014b839a8c6689922ab64feb2eefbfd53cd418c765c9607da93c1a25ac236\": container with ID starting with c57014b839a8c6689922ab64feb2eefbfd53cd418c765c9607da93c1a25ac236 not found: ID does not exist" containerID="c57014b839a8c6689922ab64feb2eefbfd53cd418c765c9607da93c1a25ac236"
Dec 05 13:23:14 crc kubenswrapper[4784]: I1205 13:23:14.702992 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c57014b839a8c6689922ab64feb2eefbfd53cd418c765c9607da93c1a25ac236"} err="failed to get container status \"c57014b839a8c6689922ab64feb2eefbfd53cd418c765c9607da93c1a25ac236\": rpc error: code = NotFound desc = could not find container \"c57014b839a8c6689922ab64feb2eefbfd53cd418c765c9607da93c1a25ac236\": container with ID starting with c57014b839a8c6689922ab64feb2eefbfd53cd418c765c9607da93c1a25ac236 not found: ID does not exist"
Dec 05 13:23:15 crc kubenswrapper[4784]: I1205 13:23:15.020735 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f26612d0-d6ff-40e8-bed9-17638f40072f" path="/var/lib/kubelet/pods/f26612d0-d6ff-40e8-bed9-17638f40072f/volumes"
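[editor's note] The RemoveContainer / DeleteContainer exchanges above are a benign race: the containers were already gone by the time the status lookup ran, so the CRI call returns gRPC NotFound, and the kubelet logs the error and moves on. A sketch of that treat-NotFound-as-done pattern using the standard gRPC status helpers (the runtime callback here is a placeholder, not a CRI client):

    package main

    import (
        "fmt"

        "google.golang.org/grpc/codes"
        "google.golang.org/grpc/status"
    )

    // removeContainer treats NotFound as success: the container is already
    // gone, so there is nothing left to remove.
    func removeContainer(id string, remove func(string) error) error {
        if err := remove(id); err != nil {
            if status.Code(err) == codes.NotFound {
                fmt.Printf("container %s already gone, nothing to do\n", id)
                return nil
            }
            return err
        }
        return nil
    }

    func main() {
        // Placeholder runtime call reporting the container as missing,
        // mirroring the "code = NotFound" errors in the log above.
        missing := func(id string) error {
            return status.Errorf(codes.NotFound, "could not find container %q", id)
        }
        _ = removeContainer("947839b22b4d...", missing)
    }
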
Dec 05 13:24:29 crc kubenswrapper[4784]: I1205 13:24:29.572377 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 13:24:29 crc kubenswrapper[4784]: I1205 13:24:29.573787 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 13:24:59 crc kubenswrapper[4784]: I1205 13:24:59.572168 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 13:24:59 crc kubenswrapper[4784]: I1205 13:24:59.574151 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 13:25:29 crc kubenswrapper[4784]: I1205 13:25:29.572130 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 13:25:29 crc kubenswrapper[4784]: I1205 13:25:29.572731 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 13:25:29 crc kubenswrapper[4784]: I1205 13:25:29.572780 4784 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm"
Dec 05 13:25:29 crc kubenswrapper[4784]: I1205 13:25:29.573562 4784 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"62a6a42d74c4a846333978c8a37da55ff1fd9eead23db76f71b5c7cee7012f21"} pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 13:25:29 crc kubenswrapper[4784]: I1205 13:25:29.573619 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" containerID="cri-o://62a6a42d74c4a846333978c8a37da55ff1fd9eead23db76f71b5c7cee7012f21" gracePeriod=600
Dec 05 13:25:29 crc kubenswrapper[4784]: E1205 13:25:29.697042 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 13:25:29 crc kubenswrapper[4784]: I1205 13:25:29.998660 4784 generic.go:334] "Generic (PLEG): container finished" podID="be412f31-7a36-4811-8914-be8cdc987d08" containerID="62a6a42d74c4a846333978c8a37da55ff1fd9eead23db76f71b5c7cee7012f21" exitCode=0
Dec 05 13:25:29 crc kubenswrapper[4784]: I1205 13:25:29.998698 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerDied","Data":"62a6a42d74c4a846333978c8a37da55ff1fd9eead23db76f71b5c7cee7012f21"}
Dec 05 13:25:29 crc kubenswrapper[4784]: I1205 13:25:29.999248 4784 scope.go:117] "RemoveContainer" containerID="237c38110011a6a9052c44b5205631856f885090c2cf396eafbe5c5b756e9b09"
Dec 05 13:25:29 crc kubenswrapper[4784]: I1205 13:25:29.999547 4784 scope.go:117] "RemoveContainer" containerID="62a6a42d74c4a846333978c8a37da55ff1fd9eead23db76f71b5c7cee7012f21"
Dec 05 13:25:29 crc kubenswrapper[4784]: E1205 13:25:29.999835 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 13:25:44 crc kubenswrapper[4784]: I1205 13:25:43.999432 4784 scope.go:117] "RemoveContainer" containerID="62a6a42d74c4a846333978c8a37da55ff1fd9eead23db76f71b5c7cee7012f21"
Dec 05 13:25:44 crc kubenswrapper[4784]: E1205 13:25:44.000369 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 13:25:55 crc kubenswrapper[4784]: I1205 13:25:55.998665 4784 scope.go:117] "RemoveContainer" containerID="62a6a42d74c4a846333978c8a37da55ff1fd9eead23db76f71b5c7cee7012f21"
Dec 05 13:25:56 crc kubenswrapper[4784]: E1205 13:25:55.999513 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 13:26:06 crc kubenswrapper[4784]: I1205 13:26:06.998732 4784 scope.go:117] "RemoveContainer" containerID="62a6a42d74c4a846333978c8a37da55ff1fd9eead23db76f71b5c7cee7012f21"
Dec 05 13:26:07 crc kubenswrapper[4784]: E1205 13:26:06.999568 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 13:26:19 crc kubenswrapper[4784]: I1205 13:26:18.999940 4784 scope.go:117] "RemoveContainer" containerID="62a6a42d74c4a846333978c8a37da55ff1fd9eead23db76f71b5c7cee7012f21"
Dec 05 13:26:19 crc kubenswrapper[4784]: E1205 13:26:19.001160 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 13:26:26 crc kubenswrapper[4784]: I1205 13:26:26.395627 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-xmrl4"]
Dec 05 13:26:26 crc kubenswrapper[4784]: E1205 13:26:26.396795 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f26612d0-d6ff-40e8-bed9-17638f40072f" containerName="extract-content"
Dec 05 13:26:26 crc kubenswrapper[4784]: I1205 13:26:26.396813 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="f26612d0-d6ff-40e8-bed9-17638f40072f" containerName="extract-content"
Dec 05 13:26:26 crc kubenswrapper[4784]: E1205 13:26:26.396854 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f26612d0-d6ff-40e8-bed9-17638f40072f" containerName="extract-utilities"
Dec 05 13:26:26 crc kubenswrapper[4784]: I1205 13:26:26.396863 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="f26612d0-d6ff-40e8-bed9-17638f40072f" containerName="extract-utilities"
Dec 05 13:26:26 crc kubenswrapper[4784]: E1205 13:26:26.396906 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f26612d0-d6ff-40e8-bed9-17638f40072f" containerName="registry-server"
Dec 05 13:26:26 crc kubenswrapper[4784]: I1205 13:26:26.396915 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="f26612d0-d6ff-40e8-bed9-17638f40072f" containerName="registry-server"
Dec 05 13:26:26 crc kubenswrapper[4784]: I1205 13:26:26.397171 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="f26612d0-d6ff-40e8-bed9-17638f40072f" containerName="registry-server"
Dec 05 13:26:26 crc kubenswrapper[4784]: I1205 13:26:26.399131 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xmrl4"
Dec 05 13:26:26 crc kubenswrapper[4784]: I1205 13:26:26.408487 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xmrl4"]
Dec 05 13:26:26 crc kubenswrapper[4784]: I1205 13:26:26.486511 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6r66r\" (UniqueName: \"kubernetes.io/projected/c64d36be-4e69-4571-863f-fa1b532950b2-kube-api-access-6r66r\") pod \"redhat-operators-xmrl4\" (UID: \"c64d36be-4e69-4571-863f-fa1b532950b2\") " pod="openshift-marketplace/redhat-operators-xmrl4"
Dec 05 13:26:26 crc kubenswrapper[4784]: I1205 13:26:26.486792 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c64d36be-4e69-4571-863f-fa1b532950b2-utilities\") pod \"redhat-operators-xmrl4\" (UID: \"c64d36be-4e69-4571-863f-fa1b532950b2\") " pod="openshift-marketplace/redhat-operators-xmrl4"
Dec 05 13:26:26 crc kubenswrapper[4784]: I1205 13:26:26.487107 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c64d36be-4e69-4571-863f-fa1b532950b2-catalog-content\") pod \"redhat-operators-xmrl4\" (UID: \"c64d36be-4e69-4571-863f-fa1b532950b2\") " pod="openshift-marketplace/redhat-operators-xmrl4"
Dec 05 13:26:26 crc kubenswrapper[4784]: I1205 13:26:26.589782 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c64d36be-4e69-4571-863f-fa1b532950b2-catalog-content\") pod \"redhat-operators-xmrl4\" (UID: \"c64d36be-4e69-4571-863f-fa1b532950b2\") " pod="openshift-marketplace/redhat-operators-xmrl4"
Dec 05 13:26:26 crc kubenswrapper[4784]: I1205 13:26:26.589914 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6r66r\" (UniqueName: \"kubernetes.io/projected/c64d36be-4e69-4571-863f-fa1b532950b2-kube-api-access-6r66r\") pod \"redhat-operators-xmrl4\" (UID: \"c64d36be-4e69-4571-863f-fa1b532950b2\") " pod="openshift-marketplace/redhat-operators-xmrl4"
Dec 05 13:26:26 crc kubenswrapper[4784]: I1205 13:26:26.590029 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c64d36be-4e69-4571-863f-fa1b532950b2-utilities\") pod \"redhat-operators-xmrl4\" (UID: \"c64d36be-4e69-4571-863f-fa1b532950b2\") " pod="openshift-marketplace/redhat-operators-xmrl4"
Dec 05 13:26:26 crc kubenswrapper[4784]: I1205 13:26:26.590562 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c64d36be-4e69-4571-863f-fa1b532950b2-catalog-content\") pod \"redhat-operators-xmrl4\" (UID: \"c64d36be-4e69-4571-863f-fa1b532950b2\") " pod="openshift-marketplace/redhat-operators-xmrl4"
Dec 05 13:26:26 crc kubenswrapper[4784]: I1205 13:26:26.590562 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c64d36be-4e69-4571-863f-fa1b532950b2-utilities\") pod \"redhat-operators-xmrl4\" (UID: \"c64d36be-4e69-4571-863f-fa1b532950b2\") " pod="openshift-marketplace/redhat-operators-xmrl4"
Dec 05 13:26:26 crc kubenswrapper[4784]: I1205 13:26:26.618641 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6r66r\" (UniqueName: \"kubernetes.io/projected/c64d36be-4e69-4571-863f-fa1b532950b2-kube-api-access-6r66r\") pod \"redhat-operators-xmrl4\" (UID: \"c64d36be-4e69-4571-863f-fa1b532950b2\") " pod="openshift-marketplace/redhat-operators-xmrl4"
Dec 05 13:26:26 crc kubenswrapper[4784]: I1205 13:26:26.734171 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xmrl4"
Dec 05 13:26:27 crc kubenswrapper[4784]: I1205 13:26:27.266429 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xmrl4"]
Dec 05 13:26:27 crc kubenswrapper[4784]: I1205 13:26:27.621705 4784 generic.go:334] "Generic (PLEG): container finished" podID="c64d36be-4e69-4571-863f-fa1b532950b2" containerID="d4f6ac45a0821b06c1c9aac68fe2b2fed0bd7b71b99ad2f361c5f0978c7b6e50" exitCode=0
Dec 05 13:26:27 crc kubenswrapper[4784]: I1205 13:26:27.621811 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xmrl4" event={"ID":"c64d36be-4e69-4571-863f-fa1b532950b2","Type":"ContainerDied","Data":"d4f6ac45a0821b06c1c9aac68fe2b2fed0bd7b71b99ad2f361c5f0978c7b6e50"}
Dec 05 13:26:27 crc kubenswrapper[4784]: I1205 13:26:27.622069 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xmrl4" event={"ID":"c64d36be-4e69-4571-863f-fa1b532950b2","Type":"ContainerStarted","Data":"64c6cbf3e4de5dd0f08bb9e89a1b0a34542e77ac2b9b28b24635455b07ceac7d"}
Dec 05 13:26:28 crc kubenswrapper[4784]: I1205 13:26:28.635382 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xmrl4" event={"ID":"c64d36be-4e69-4571-863f-fa1b532950b2","Type":"ContainerStarted","Data":"56b86ef902f41805adf7fb11f9aaf704a58a3a6eaf4b709f25bfffbf99e69e01"}
Dec 05 13:26:30 crc kubenswrapper[4784]: I1205 13:26:29.999560 4784 scope.go:117] "RemoveContainer" containerID="62a6a42d74c4a846333978c8a37da55ff1fd9eead23db76f71b5c7cee7012f21"
Dec 05 13:26:30 crc kubenswrapper[4784]: E1205 13:26:30.000754 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 13:26:31 crc kubenswrapper[4784]: I1205 13:26:31.666321 4784 generic.go:334] "Generic (PLEG): container finished" podID="c64d36be-4e69-4571-863f-fa1b532950b2" containerID="56b86ef902f41805adf7fb11f9aaf704a58a3a6eaf4b709f25bfffbf99e69e01" exitCode=0
Dec 05 13:26:31 crc kubenswrapper[4784]: I1205 13:26:31.666668 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xmrl4" event={"ID":"c64d36be-4e69-4571-863f-fa1b532950b2","Type":"ContainerDied","Data":"56b86ef902f41805adf7fb11f9aaf704a58a3a6eaf4b709f25bfffbf99e69e01"}
Dec 05 13:26:32 crc kubenswrapper[4784]: I1205 13:26:32.680438 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xmrl4" event={"ID":"c64d36be-4e69-4571-863f-fa1b532950b2","Type":"ContainerStarted","Data":"ca395a2f97efd2bd5ae7dbc680b4d13adbef8719c825f59cf318e7d72cfec482"}
Dec 05 13:26:32 crc kubenswrapper[4784]: I1205 13:26:32.715370 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-xmrl4" podStartSLOduration=2.237416399 podStartE2EDuration="6.715352599s" podCreationTimestamp="2025-12-05 13:26:26 +0000 UTC" firstStartedPulling="2025-12-05 13:26:27.623621687 +0000 UTC m=+3667.043688512" lastFinishedPulling="2025-12-05 13:26:32.101557857 +0000 UTC m=+3671.521624712" observedRunningTime="2025-12-05 13:26:32.714179172 +0000 UTC m=+3672.134245987" watchObservedRunningTime="2025-12-05 13:26:32.715352599 +0000 UTC m=+3672.135419414"
Dec 05 13:26:36 crc kubenswrapper[4784]: I1205 13:26:36.735151 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-xmrl4"
Dec 05 13:26:36 crc kubenswrapper[4784]: I1205 13:26:36.735803 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-xmrl4"
Dec 05 13:26:37 crc kubenswrapper[4784]: I1205 13:26:37.777276 4784 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-xmrl4" podUID="c64d36be-4e69-4571-863f-fa1b532950b2" containerName="registry-server" probeResult="failure" output=<
Dec 05 13:26:37 crc kubenswrapper[4784]: timeout: failed to connect service ":50051" within 1s
Dec 05 13:26:37 crc kubenswrapper[4784]: >
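[editor's note] The startup-probe failure above ("timeout: failed to connect service \":50051\" within 1s") is the registry-server's gRPC port not accepting connections yet; the same probe passes about ten seconds later. At the transport level that check reduces to a dial with a deadline, sketched here (the catalog pod's real probe is a gRPC health check, so a bare TCP dial is a stand-in, and localhost:50051 is assumed from the probe output):

    package main

    import (
        "fmt"
        "net"
        "time"
    )

    func main() {
        // Port taken from the probe output above.
        conn, err := net.DialTimeout("tcp", "localhost:50051", time.Second)
        if err != nil {
            fmt.Printf("timeout: failed to connect service \":50051\" within 1s (%v)\n", err)
            return
        }
        conn.Close()
        fmt.Println("service is accepting connections")
    }
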
Dec 05 13:26:43 crc kubenswrapper[4784]: I1205 13:26:42.999459 4784 scope.go:117] "RemoveContainer" containerID="62a6a42d74c4a846333978c8a37da55ff1fd9eead23db76f71b5c7cee7012f21"
Dec 05 13:26:43 crc kubenswrapper[4784]: E1205 13:26:43.000360 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 13:26:46 crc kubenswrapper[4784]: I1205 13:26:46.777181 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-xmrl4"
Dec 05 13:26:46 crc kubenswrapper[4784]: I1205 13:26:46.824252 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-xmrl4"
Dec 05 13:26:47 crc kubenswrapper[4784]: I1205 13:26:47.018548 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xmrl4"]
Dec 05 13:26:47 crc kubenswrapper[4784]: I1205 13:26:47.845165 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-xmrl4" podUID="c64d36be-4e69-4571-863f-fa1b532950b2" containerName="registry-server" containerID="cri-o://ca395a2f97efd2bd5ae7dbc680b4d13adbef8719c825f59cf318e7d72cfec482" gracePeriod=2
Dec 05 13:26:48 crc kubenswrapper[4784]: I1205 13:26:48.517063 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xmrl4"
Dec 05 13:26:48 crc kubenswrapper[4784]: I1205 13:26:48.555296 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6r66r\" (UniqueName: \"kubernetes.io/projected/c64d36be-4e69-4571-863f-fa1b532950b2-kube-api-access-6r66r\") pod \"c64d36be-4e69-4571-863f-fa1b532950b2\" (UID: \"c64d36be-4e69-4571-863f-fa1b532950b2\") "
Dec 05 13:26:48 crc kubenswrapper[4784]: I1205 13:26:48.555451 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c64d36be-4e69-4571-863f-fa1b532950b2-utilities\") pod \"c64d36be-4e69-4571-863f-fa1b532950b2\" (UID: \"c64d36be-4e69-4571-863f-fa1b532950b2\") "
Dec 05 13:26:48 crc kubenswrapper[4784]: I1205 13:26:48.555621 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c64d36be-4e69-4571-863f-fa1b532950b2-catalog-content\") pod \"c64d36be-4e69-4571-863f-fa1b532950b2\" (UID: \"c64d36be-4e69-4571-863f-fa1b532950b2\") "
Dec 05 13:26:48 crc kubenswrapper[4784]: I1205 13:26:48.556980 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c64d36be-4e69-4571-863f-fa1b532950b2-utilities" (OuterVolumeSpecName: "utilities") pod "c64d36be-4e69-4571-863f-fa1b532950b2" (UID: "c64d36be-4e69-4571-863f-fa1b532950b2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 13:26:48 crc kubenswrapper[4784]: I1205 13:26:48.567594 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c64d36be-4e69-4571-863f-fa1b532950b2-kube-api-access-6r66r" (OuterVolumeSpecName: "kube-api-access-6r66r") pod "c64d36be-4e69-4571-863f-fa1b532950b2" (UID: "c64d36be-4e69-4571-863f-fa1b532950b2"). InnerVolumeSpecName "kube-api-access-6r66r". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 13:26:48 crc kubenswrapper[4784]: I1205 13:26:48.659397 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6r66r\" (UniqueName: \"kubernetes.io/projected/c64d36be-4e69-4571-863f-fa1b532950b2-kube-api-access-6r66r\") on node \"crc\" DevicePath \"\""
Dec 05 13:26:48 crc kubenswrapper[4784]: I1205 13:26:48.659444 4784 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c64d36be-4e69-4571-863f-fa1b532950b2-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 13:26:48 crc kubenswrapper[4784]: I1205 13:26:48.667464 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c64d36be-4e69-4571-863f-fa1b532950b2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c64d36be-4e69-4571-863f-fa1b532950b2" (UID: "c64d36be-4e69-4571-863f-fa1b532950b2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 13:26:48 crc kubenswrapper[4784]: I1205 13:26:48.760978 4784 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c64d36be-4e69-4571-863f-fa1b532950b2-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 13:26:48 crc kubenswrapper[4784]: I1205 13:26:48.856685 4784 generic.go:334] "Generic (PLEG): container finished" podID="c64d36be-4e69-4571-863f-fa1b532950b2" containerID="ca395a2f97efd2bd5ae7dbc680b4d13adbef8719c825f59cf318e7d72cfec482" exitCode=0
Dec 05 13:26:48 crc kubenswrapper[4784]: I1205 13:26:48.856735 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xmrl4" event={"ID":"c64d36be-4e69-4571-863f-fa1b532950b2","Type":"ContainerDied","Data":"ca395a2f97efd2bd5ae7dbc680b4d13adbef8719c825f59cf318e7d72cfec482"}
Dec 05 13:26:48 crc kubenswrapper[4784]: I1205 13:26:48.856759 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xmrl4"
Dec 05 13:26:48 crc kubenswrapper[4784]: I1205 13:26:48.856775 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xmrl4" event={"ID":"c64d36be-4e69-4571-863f-fa1b532950b2","Type":"ContainerDied","Data":"64c6cbf3e4de5dd0f08bb9e89a1b0a34542e77ac2b9b28b24635455b07ceac7d"}
Dec 05 13:26:48 crc kubenswrapper[4784]: I1205 13:26:48.856796 4784 scope.go:117] "RemoveContainer" containerID="ca395a2f97efd2bd5ae7dbc680b4d13adbef8719c825f59cf318e7d72cfec482"
Dec 05 13:26:48 crc kubenswrapper[4784]: I1205 13:26:48.881428 4784 scope.go:117] "RemoveContainer" containerID="56b86ef902f41805adf7fb11f9aaf704a58a3a6eaf4b709f25bfffbf99e69e01"
Dec 05 13:26:48 crc kubenswrapper[4784]: I1205 13:26:48.890541 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xmrl4"]
Dec 05 13:26:48 crc kubenswrapper[4784]: I1205 13:26:48.902981 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-xmrl4"]
Dec 05 13:26:48 crc kubenswrapper[4784]: I1205 13:26:48.918611 4784 scope.go:117] "RemoveContainer" containerID="d4f6ac45a0821b06c1c9aac68fe2b2fed0bd7b71b99ad2f361c5f0978c7b6e50"
Dec 05 13:26:48 crc kubenswrapper[4784]: I1205 13:26:48.974907 4784 scope.go:117] "RemoveContainer" containerID="ca395a2f97efd2bd5ae7dbc680b4d13adbef8719c825f59cf318e7d72cfec482"
Dec 05 13:26:48 crc kubenswrapper[4784]: E1205 13:26:48.975359 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca395a2f97efd2bd5ae7dbc680b4d13adbef8719c825f59cf318e7d72cfec482\": container with ID starting with ca395a2f97efd2bd5ae7dbc680b4d13adbef8719c825f59cf318e7d72cfec482 not found: ID does not exist" containerID="ca395a2f97efd2bd5ae7dbc680b4d13adbef8719c825f59cf318e7d72cfec482"
Dec 05 13:26:48 crc kubenswrapper[4784]: I1205 13:26:48.975392 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca395a2f97efd2bd5ae7dbc680b4d13adbef8719c825f59cf318e7d72cfec482"} err="failed to get container status \"ca395a2f97efd2bd5ae7dbc680b4d13adbef8719c825f59cf318e7d72cfec482\": rpc error: code = NotFound desc = could not find container \"ca395a2f97efd2bd5ae7dbc680b4d13adbef8719c825f59cf318e7d72cfec482\": container with ID starting with ca395a2f97efd2bd5ae7dbc680b4d13adbef8719c825f59cf318e7d72cfec482 not found: ID does not exist"
Dec 05 13:26:48 crc kubenswrapper[4784]: I1205 13:26:48.975413 4784 scope.go:117] "RemoveContainer" containerID="56b86ef902f41805adf7fb11f9aaf704a58a3a6eaf4b709f25bfffbf99e69e01"
Dec 05 13:26:48 crc kubenswrapper[4784]: E1205 13:26:48.975709 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"56b86ef902f41805adf7fb11f9aaf704a58a3a6eaf4b709f25bfffbf99e69e01\": container with ID starting with 56b86ef902f41805adf7fb11f9aaf704a58a3a6eaf4b709f25bfffbf99e69e01 not found: ID does not exist" containerID="56b86ef902f41805adf7fb11f9aaf704a58a3a6eaf4b709f25bfffbf99e69e01"
Dec 05 13:26:48 crc kubenswrapper[4784]: I1205 13:26:48.975759 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"56b86ef902f41805adf7fb11f9aaf704a58a3a6eaf4b709f25bfffbf99e69e01"} err="failed to get container status \"56b86ef902f41805adf7fb11f9aaf704a58a3a6eaf4b709f25bfffbf99e69e01\": rpc error: code = NotFound desc = could not find container \"56b86ef902f41805adf7fb11f9aaf704a58a3a6eaf4b709f25bfffbf99e69e01\": container with ID starting with 56b86ef902f41805adf7fb11f9aaf704a58a3a6eaf4b709f25bfffbf99e69e01 not found: ID does not exist"
Dec 05 13:26:48 crc kubenswrapper[4784]: I1205 13:26:48.975794 4784 scope.go:117] "RemoveContainer" containerID="d4f6ac45a0821b06c1c9aac68fe2b2fed0bd7b71b99ad2f361c5f0978c7b6e50"
Dec 05 13:26:48 crc kubenswrapper[4784]: E1205 13:26:48.976355 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4f6ac45a0821b06c1c9aac68fe2b2fed0bd7b71b99ad2f361c5f0978c7b6e50\": container with ID starting with d4f6ac45a0821b06c1c9aac68fe2b2fed0bd7b71b99ad2f361c5f0978c7b6e50 not found: ID does not exist" containerID="d4f6ac45a0821b06c1c9aac68fe2b2fed0bd7b71b99ad2f361c5f0978c7b6e50"
Dec 05 13:26:48 crc kubenswrapper[4784]: I1205 13:26:48.976380 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4f6ac45a0821b06c1c9aac68fe2b2fed0bd7b71b99ad2f361c5f0978c7b6e50"} err="failed to get container status \"d4f6ac45a0821b06c1c9aac68fe2b2fed0bd7b71b99ad2f361c5f0978c7b6e50\": rpc error: code = NotFound desc = could not find container \"d4f6ac45a0821b06c1c9aac68fe2b2fed0bd7b71b99ad2f361c5f0978c7b6e50\": container with ID starting with d4f6ac45a0821b06c1c9aac68fe2b2fed0bd7b71b99ad2f361c5f0978c7b6e50 not found: ID does not exist"
Dec 05 13:26:49 crc kubenswrapper[4784]: I1205 13:26:49.012124 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c64d36be-4e69-4571-863f-fa1b532950b2" path="/var/lib/kubelet/pods/c64d36be-4e69-4571-863f-fa1b532950b2/volumes"
Dec 05 13:26:56 crc kubenswrapper[4784]: I1205 13:26:56.998884 4784 scope.go:117] "RemoveContainer" containerID="62a6a42d74c4a846333978c8a37da55ff1fd9eead23db76f71b5c7cee7012f21"
Dec 05 13:26:57 crc kubenswrapper[4784]: E1205 13:26:56.999699 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 13:27:08 crc kubenswrapper[4784]: I1205 13:27:08.999099 4784 scope.go:117] "RemoveContainer" containerID="62a6a42d74c4a846333978c8a37da55ff1fd9eead23db76f71b5c7cee7012f21"
Dec 05 13:27:09 crc kubenswrapper[4784]: E1205 13:27:09.000015 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 13:27:23 crc kubenswrapper[4784]: I1205 13:27:23.998520 4784 scope.go:117] "RemoveContainer" containerID="62a6a42d74c4a846333978c8a37da55ff1fd9eead23db76f71b5c7cee7012f21"
Dec 05 13:27:24 crc kubenswrapper[4784]: E1205 13:27:23.999330 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 13:27:34 crc kubenswrapper[4784]: I1205 13:27:34.999049 4784 scope.go:117] "RemoveContainer" containerID="62a6a42d74c4a846333978c8a37da55ff1fd9eead23db76f71b5c7cee7012f21"
Dec 05 13:27:35 crc kubenswrapper[4784]: E1205 13:27:34.999847 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 13:27:50 crc kubenswrapper[4784]: I1205 13:27:49.999637 4784 scope.go:117] "RemoveContainer" containerID="62a6a42d74c4a846333978c8a37da55ff1fd9eead23db76f71b5c7cee7012f21"
Dec 05 13:27:50 crc kubenswrapper[4784]: E1205 13:27:50.000566 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 13:28:02 crc kubenswrapper[4784]: I1205 13:28:02.998802 4784 scope.go:117] "RemoveContainer" containerID="62a6a42d74c4a846333978c8a37da55ff1fd9eead23db76f71b5c7cee7012f21"
Dec 05 13:28:03 crc kubenswrapper[4784]: E1205 13:28:02.999555 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 13:28:13 crc kubenswrapper[4784]: I1205 13:28:13.999271 4784 scope.go:117] "RemoveContainer" containerID="62a6a42d74c4a846333978c8a37da55ff1fd9eead23db76f71b5c7cee7012f21"
Dec 05 13:28:14 crc kubenswrapper[4784]: E1205 13:28:14.001409 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 13:28:24 crc kubenswrapper[4784]: I1205 13:28:24.999018 4784 scope.go:117] "RemoveContainer" containerID="62a6a42d74c4a846333978c8a37da55ff1fd9eead23db76f71b5c7cee7012f21"
Dec 05 13:28:25 crc kubenswrapper[4784]: E1205 13:28:24.999805 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 13:28:39 crc kubenswrapper[4784]: I1205 13:28:39.998963 4784 scope.go:117] "RemoveContainer" containerID="62a6a42d74c4a846333978c8a37da55ff1fd9eead23db76f71b5c7cee7012f21"
Dec 05 13:28:40 crc kubenswrapper[4784]: E1205 13:28:39.999708 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 13:28:51 crc kubenswrapper[4784]: I1205 13:28:51.018435 4784 scope.go:117] "RemoveContainer" containerID="62a6a42d74c4a846333978c8a37da55ff1fd9eead23db76f71b5c7cee7012f21"
Dec 05 13:28:51 crc kubenswrapper[4784]: E1205 13:28:51.019442 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 13:29:02 crc kubenswrapper[4784]: I1205 13:29:02.000040 4784 scope.go:117] "RemoveContainer" containerID="62a6a42d74c4a846333978c8a37da55ff1fd9eead23db76f71b5c7cee7012f21"
Dec 05 13:29:02 crc kubenswrapper[4784]: E1205 13:29:02.001703 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 13:29:16 crc kubenswrapper[4784]: I1205 13:29:15.999869 4784 scope.go:117] "RemoveContainer" containerID="62a6a42d74c4a846333978c8a37da55ff1fd9eead23db76f71b5c7cee7012f21"
Dec 05 13:29:16 crc kubenswrapper[4784]: E1205 13:29:16.000750 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 13:29:28 crc kubenswrapper[4784]: I1205 13:29:28.998940 4784 scope.go:117] "RemoveContainer" containerID="62a6a42d74c4a846333978c8a37da55ff1fd9eead23db76f71b5c7cee7012f21"
Dec 05 13:29:29 crc kubenswrapper[4784]: E1205 13:29:28.999677 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 13:29:43 crc kubenswrapper[4784]: I1205 13:29:43.000173 4784 scope.go:117] "RemoveContainer" containerID="62a6a42d74c4a846333978c8a37da55ff1fd9eead23db76f71b5c7cee7012f21"
Dec 05 13:29:43 crc kubenswrapper[4784]: E1205 13:29:43.001038 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 13:29:54 crc kubenswrapper[4784]: I1205 13:29:54.999226 4784 scope.go:117] "RemoveContainer" containerID="62a6a42d74c4a846333978c8a37da55ff1fd9eead23db76f71b5c7cee7012f21"
Dec 05 13:29:55 crc kubenswrapper[4784]: E1205 13:29:55.000134 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
kubenswrapper[4784]: I1205 13:30:00.198985 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="c64d36be-4e69-4571-863f-fa1b532950b2" containerName="registry-server" Dec 05 13:30:00 crc kubenswrapper[4784]: I1205 13:30:00.199879 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415690-6d4tl" Dec 05 13:30:00 crc kubenswrapper[4784]: I1205 13:30:00.203982 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 13:30:00 crc kubenswrapper[4784]: I1205 13:30:00.204284 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 13:30:00 crc kubenswrapper[4784]: I1205 13:30:00.210771 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415690-6d4tl"] Dec 05 13:30:00 crc kubenswrapper[4784]: I1205 13:30:00.224416 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/66ab9e30-b279-4ffd-a2df-380f69151467-config-volume\") pod \"collect-profiles-29415690-6d4tl\" (UID: \"66ab9e30-b279-4ffd-a2df-380f69151467\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415690-6d4tl" Dec 05 13:30:00 crc kubenswrapper[4784]: I1205 13:30:00.224554 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/66ab9e30-b279-4ffd-a2df-380f69151467-secret-volume\") pod \"collect-profiles-29415690-6d4tl\" (UID: \"66ab9e30-b279-4ffd-a2df-380f69151467\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415690-6d4tl" Dec 05 13:30:00 crc kubenswrapper[4784]: I1205 13:30:00.224612 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xjxnc\" (UniqueName: \"kubernetes.io/projected/66ab9e30-b279-4ffd-a2df-380f69151467-kube-api-access-xjxnc\") pod \"collect-profiles-29415690-6d4tl\" (UID: \"66ab9e30-b279-4ffd-a2df-380f69151467\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415690-6d4tl" Dec 05 13:30:00 crc kubenswrapper[4784]: I1205 13:30:00.326398 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/66ab9e30-b279-4ffd-a2df-380f69151467-secret-volume\") pod \"collect-profiles-29415690-6d4tl\" (UID: \"66ab9e30-b279-4ffd-a2df-380f69151467\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415690-6d4tl" Dec 05 13:30:00 crc kubenswrapper[4784]: I1205 13:30:00.326694 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xjxnc\" (UniqueName: \"kubernetes.io/projected/66ab9e30-b279-4ffd-a2df-380f69151467-kube-api-access-xjxnc\") pod \"collect-profiles-29415690-6d4tl\" (UID: \"66ab9e30-b279-4ffd-a2df-380f69151467\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415690-6d4tl" Dec 05 13:30:00 crc kubenswrapper[4784]: I1205 13:30:00.327180 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/66ab9e30-b279-4ffd-a2df-380f69151467-config-volume\") pod \"collect-profiles-29415690-6d4tl\" (UID: \"66ab9e30-b279-4ffd-a2df-380f69151467\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29415690-6d4tl" Dec 05 13:30:00 crc kubenswrapper[4784]: I1205 13:30:00.327896 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/66ab9e30-b279-4ffd-a2df-380f69151467-config-volume\") pod \"collect-profiles-29415690-6d4tl\" (UID: \"66ab9e30-b279-4ffd-a2df-380f69151467\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415690-6d4tl" Dec 05 13:30:00 crc kubenswrapper[4784]: I1205 13:30:00.340989 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/66ab9e30-b279-4ffd-a2df-380f69151467-secret-volume\") pod \"collect-profiles-29415690-6d4tl\" (UID: \"66ab9e30-b279-4ffd-a2df-380f69151467\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415690-6d4tl" Dec 05 13:30:00 crc kubenswrapper[4784]: I1205 13:30:00.349004 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xjxnc\" (UniqueName: \"kubernetes.io/projected/66ab9e30-b279-4ffd-a2df-380f69151467-kube-api-access-xjxnc\") pod \"collect-profiles-29415690-6d4tl\" (UID: \"66ab9e30-b279-4ffd-a2df-380f69151467\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415690-6d4tl" Dec 05 13:30:00 crc kubenswrapper[4784]: I1205 13:30:00.529331 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415690-6d4tl" Dec 05 13:30:01 crc kubenswrapper[4784]: I1205 13:30:01.510880 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415690-6d4tl"] Dec 05 13:30:01 crc kubenswrapper[4784]: I1205 13:30:01.840866 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415690-6d4tl" event={"ID":"66ab9e30-b279-4ffd-a2df-380f69151467","Type":"ContainerStarted","Data":"56b4d1a2c2a3b3c1e79a15d2cc3cfdfba6af1297c540984e1575269e209520b3"} Dec 05 13:30:01 crc kubenswrapper[4784]: I1205 13:30:01.841243 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415690-6d4tl" event={"ID":"66ab9e30-b279-4ffd-a2df-380f69151467","Type":"ContainerStarted","Data":"48b05634d279e300e2f9a404c88efd6114acd3bd41895aa11378490342c19b77"} Dec 05 13:30:01 crc kubenswrapper[4784]: I1205 13:30:01.861882 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29415690-6d4tl" podStartSLOduration=1.8618608349999999 podStartE2EDuration="1.861860835s" podCreationTimestamp="2025-12-05 13:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 13:30:01.855032312 +0000 UTC m=+3881.275099137" watchObservedRunningTime="2025-12-05 13:30:01.861860835 +0000 UTC m=+3881.281927650" Dec 05 13:30:02 crc kubenswrapper[4784]: I1205 13:30:02.851773 4784 generic.go:334] "Generic (PLEG): container finished" podID="66ab9e30-b279-4ffd-a2df-380f69151467" containerID="56b4d1a2c2a3b3c1e79a15d2cc3cfdfba6af1297c540984e1575269e209520b3" exitCode=0 Dec 05 13:30:02 crc kubenswrapper[4784]: I1205 13:30:02.851878 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415690-6d4tl" 
event={"ID":"66ab9e30-b279-4ffd-a2df-380f69151467","Type":"ContainerDied","Data":"56b4d1a2c2a3b3c1e79a15d2cc3cfdfba6af1297c540984e1575269e209520b3"} Dec 05 13:30:04 crc kubenswrapper[4784]: I1205 13:30:04.232174 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415690-6d4tl" Dec 05 13:30:04 crc kubenswrapper[4784]: I1205 13:30:04.313352 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/66ab9e30-b279-4ffd-a2df-380f69151467-config-volume\") pod \"66ab9e30-b279-4ffd-a2df-380f69151467\" (UID: \"66ab9e30-b279-4ffd-a2df-380f69151467\") " Dec 05 13:30:04 crc kubenswrapper[4784]: I1205 13:30:04.313456 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/66ab9e30-b279-4ffd-a2df-380f69151467-secret-volume\") pod \"66ab9e30-b279-4ffd-a2df-380f69151467\" (UID: \"66ab9e30-b279-4ffd-a2df-380f69151467\") " Dec 05 13:30:04 crc kubenswrapper[4784]: I1205 13:30:04.313503 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xjxnc\" (UniqueName: \"kubernetes.io/projected/66ab9e30-b279-4ffd-a2df-380f69151467-kube-api-access-xjxnc\") pod \"66ab9e30-b279-4ffd-a2df-380f69151467\" (UID: \"66ab9e30-b279-4ffd-a2df-380f69151467\") " Dec 05 13:30:04 crc kubenswrapper[4784]: I1205 13:30:04.313955 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66ab9e30-b279-4ffd-a2df-380f69151467-config-volume" (OuterVolumeSpecName: "config-volume") pod "66ab9e30-b279-4ffd-a2df-380f69151467" (UID: "66ab9e30-b279-4ffd-a2df-380f69151467"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 13:30:04 crc kubenswrapper[4784]: I1205 13:30:04.314351 4784 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/66ab9e30-b279-4ffd-a2df-380f69151467-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 13:30:04 crc kubenswrapper[4784]: I1205 13:30:04.322307 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66ab9e30-b279-4ffd-a2df-380f69151467-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "66ab9e30-b279-4ffd-a2df-380f69151467" (UID: "66ab9e30-b279-4ffd-a2df-380f69151467"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:30:04 crc kubenswrapper[4784]: I1205 13:30:04.322383 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66ab9e30-b279-4ffd-a2df-380f69151467-kube-api-access-xjxnc" (OuterVolumeSpecName: "kube-api-access-xjxnc") pod "66ab9e30-b279-4ffd-a2df-380f69151467" (UID: "66ab9e30-b279-4ffd-a2df-380f69151467"). InnerVolumeSpecName "kube-api-access-xjxnc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:30:04 crc kubenswrapper[4784]: I1205 13:30:04.416410 4784 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/66ab9e30-b279-4ffd-a2df-380f69151467-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 13:30:04 crc kubenswrapper[4784]: I1205 13:30:04.416633 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xjxnc\" (UniqueName: \"kubernetes.io/projected/66ab9e30-b279-4ffd-a2df-380f69151467-kube-api-access-xjxnc\") on node \"crc\" DevicePath \"\"" Dec 05 13:30:04 crc kubenswrapper[4784]: I1205 13:30:04.583709 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415645-2s9dd"] Dec 05 13:30:04 crc kubenswrapper[4784]: I1205 13:30:04.593427 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415645-2s9dd"] Dec 05 13:30:04 crc kubenswrapper[4784]: I1205 13:30:04.873565 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415690-6d4tl" event={"ID":"66ab9e30-b279-4ffd-a2df-380f69151467","Type":"ContainerDied","Data":"48b05634d279e300e2f9a404c88efd6114acd3bd41895aa11378490342c19b77"} Dec 05 13:30:04 crc kubenswrapper[4784]: I1205 13:30:04.873605 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="48b05634d279e300e2f9a404c88efd6114acd3bd41895aa11378490342c19b77" Dec 05 13:30:04 crc kubenswrapper[4784]: I1205 13:30:04.873651 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415690-6d4tl" Dec 05 13:30:05 crc kubenswrapper[4784]: I1205 13:30:05.008896 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13e0a0c7-4992-40a3-8835-7163421c900d" path="/var/lib/kubelet/pods/13e0a0c7-4992-40a3-8835-7163421c900d/volumes" Dec 05 13:30:10 crc kubenswrapper[4784]: I1205 13:30:10.000121 4784 scope.go:117] "RemoveContainer" containerID="62a6a42d74c4a846333978c8a37da55ff1fd9eead23db76f71b5c7cee7012f21" Dec 05 13:30:10 crc kubenswrapper[4784]: E1205 13:30:10.001003 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:30:24 crc kubenswrapper[4784]: I1205 13:30:24.999163 4784 scope.go:117] "RemoveContainer" containerID="62a6a42d74c4a846333978c8a37da55ff1fd9eead23db76f71b5c7cee7012f21" Dec 05 13:30:25 crc kubenswrapper[4784]: E1205 13:30:24.999856 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:30:39 crc kubenswrapper[4784]: I1205 13:30:39.883954 4784 scope.go:117] "RemoveContainer" containerID="7257666f60b1b74ab9e6db2a256cf4aecbfe85562cfc50288182a9f960f76db2" Dec 05 
13:30:39 crc kubenswrapper[4784]: I1205 13:30:39.999835 4784 scope.go:117] "RemoveContainer" containerID="62a6a42d74c4a846333978c8a37da55ff1fd9eead23db76f71b5c7cee7012f21" Dec 05 13:30:41 crc kubenswrapper[4784]: I1205 13:30:41.259121 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerStarted","Data":"738ead2c18af5b64d1b1020bddae849226830b50bb2f086570a0ebf27e5d0d00"} Dec 05 13:30:52 crc kubenswrapper[4784]: I1205 13:30:52.725045 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-nhjcq"] Dec 05 13:30:52 crc kubenswrapper[4784]: E1205 13:30:52.726106 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66ab9e30-b279-4ffd-a2df-380f69151467" containerName="collect-profiles" Dec 05 13:30:52 crc kubenswrapper[4784]: I1205 13:30:52.726122 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="66ab9e30-b279-4ffd-a2df-380f69151467" containerName="collect-profiles" Dec 05 13:30:52 crc kubenswrapper[4784]: I1205 13:30:52.726427 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="66ab9e30-b279-4ffd-a2df-380f69151467" containerName="collect-profiles" Dec 05 13:30:52 crc kubenswrapper[4784]: I1205 13:30:52.729373 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nhjcq" Dec 05 13:30:52 crc kubenswrapper[4784]: I1205 13:30:52.746698 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-nhjcq"] Dec 05 13:30:52 crc kubenswrapper[4784]: I1205 13:30:52.854090 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8792f5f4-a75c-49de-8964-ab829f8dfe59-utilities\") pod \"community-operators-nhjcq\" (UID: \"8792f5f4-a75c-49de-8964-ab829f8dfe59\") " pod="openshift-marketplace/community-operators-nhjcq" Dec 05 13:30:52 crc kubenswrapper[4784]: I1205 13:30:52.854486 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8792f5f4-a75c-49de-8964-ab829f8dfe59-catalog-content\") pod \"community-operators-nhjcq\" (UID: \"8792f5f4-a75c-49de-8964-ab829f8dfe59\") " pod="openshift-marketplace/community-operators-nhjcq" Dec 05 13:30:52 crc kubenswrapper[4784]: I1205 13:30:52.854538 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rr7x\" (UniqueName: \"kubernetes.io/projected/8792f5f4-a75c-49de-8964-ab829f8dfe59-kube-api-access-9rr7x\") pod \"community-operators-nhjcq\" (UID: \"8792f5f4-a75c-49de-8964-ab829f8dfe59\") " pod="openshift-marketplace/community-operators-nhjcq" Dec 05 13:30:52 crc kubenswrapper[4784]: I1205 13:30:52.956723 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8792f5f4-a75c-49de-8964-ab829f8dfe59-catalog-content\") pod \"community-operators-nhjcq\" (UID: \"8792f5f4-a75c-49de-8964-ab829f8dfe59\") " pod="openshift-marketplace/community-operators-nhjcq" Dec 05 13:30:52 crc kubenswrapper[4784]: I1205 13:30:52.956773 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9rr7x\" (UniqueName: 
\"kubernetes.io/projected/8792f5f4-a75c-49de-8964-ab829f8dfe59-kube-api-access-9rr7x\") pod \"community-operators-nhjcq\" (UID: \"8792f5f4-a75c-49de-8964-ab829f8dfe59\") " pod="openshift-marketplace/community-operators-nhjcq" Dec 05 13:30:52 crc kubenswrapper[4784]: I1205 13:30:52.956880 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8792f5f4-a75c-49de-8964-ab829f8dfe59-utilities\") pod \"community-operators-nhjcq\" (UID: \"8792f5f4-a75c-49de-8964-ab829f8dfe59\") " pod="openshift-marketplace/community-operators-nhjcq" Dec 05 13:30:52 crc kubenswrapper[4784]: I1205 13:30:52.957408 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8792f5f4-a75c-49de-8964-ab829f8dfe59-utilities\") pod \"community-operators-nhjcq\" (UID: \"8792f5f4-a75c-49de-8964-ab829f8dfe59\") " pod="openshift-marketplace/community-operators-nhjcq" Dec 05 13:30:52 crc kubenswrapper[4784]: I1205 13:30:52.957621 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8792f5f4-a75c-49de-8964-ab829f8dfe59-catalog-content\") pod \"community-operators-nhjcq\" (UID: \"8792f5f4-a75c-49de-8964-ab829f8dfe59\") " pod="openshift-marketplace/community-operators-nhjcq" Dec 05 13:30:52 crc kubenswrapper[4784]: I1205 13:30:52.978514 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9rr7x\" (UniqueName: \"kubernetes.io/projected/8792f5f4-a75c-49de-8964-ab829f8dfe59-kube-api-access-9rr7x\") pod \"community-operators-nhjcq\" (UID: \"8792f5f4-a75c-49de-8964-ab829f8dfe59\") " pod="openshift-marketplace/community-operators-nhjcq" Dec 05 13:30:53 crc kubenswrapper[4784]: I1205 13:30:53.050641 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-nhjcq" Dec 05 13:30:53 crc kubenswrapper[4784]: I1205 13:30:53.707039 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-nhjcq"] Dec 05 13:30:53 crc kubenswrapper[4784]: W1205 13:30:53.716695 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8792f5f4_a75c_49de_8964_ab829f8dfe59.slice/crio-4b6e14c93d296a532bfda6ecb2e671df5e5057211a263f23fe351709f4543013 WatchSource:0}: Error finding container 4b6e14c93d296a532bfda6ecb2e671df5e5057211a263f23fe351709f4543013: Status 404 returned error can't find the container with id 4b6e14c93d296a532bfda6ecb2e671df5e5057211a263f23fe351709f4543013 Dec 05 13:30:54 crc kubenswrapper[4784]: I1205 13:30:54.409597 4784 generic.go:334] "Generic (PLEG): container finished" podID="8792f5f4-a75c-49de-8964-ab829f8dfe59" containerID="fdedf3f634cc3bd136e609b869569a681e8057374944355d66b60e8552093331" exitCode=0 Dec 05 13:30:54 crc kubenswrapper[4784]: I1205 13:30:54.409677 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nhjcq" event={"ID":"8792f5f4-a75c-49de-8964-ab829f8dfe59","Type":"ContainerDied","Data":"fdedf3f634cc3bd136e609b869569a681e8057374944355d66b60e8552093331"} Dec 05 13:30:54 crc kubenswrapper[4784]: I1205 13:30:54.409937 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nhjcq" event={"ID":"8792f5f4-a75c-49de-8964-ab829f8dfe59","Type":"ContainerStarted","Data":"4b6e14c93d296a532bfda6ecb2e671df5e5057211a263f23fe351709f4543013"} Dec 05 13:30:54 crc kubenswrapper[4784]: I1205 13:30:54.411791 4784 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 13:30:55 crc kubenswrapper[4784]: I1205 13:30:55.420656 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nhjcq" event={"ID":"8792f5f4-a75c-49de-8964-ab829f8dfe59","Type":"ContainerStarted","Data":"66e4e225116dd489ee92be03afb51705938255707cb333e6e60b1d168ff0a069"} Dec 05 13:30:56 crc kubenswrapper[4784]: I1205 13:30:56.435526 4784 generic.go:334] "Generic (PLEG): container finished" podID="8792f5f4-a75c-49de-8964-ab829f8dfe59" containerID="66e4e225116dd489ee92be03afb51705938255707cb333e6e60b1d168ff0a069" exitCode=0 Dec 05 13:30:56 crc kubenswrapper[4784]: I1205 13:30:56.435697 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nhjcq" event={"ID":"8792f5f4-a75c-49de-8964-ab829f8dfe59","Type":"ContainerDied","Data":"66e4e225116dd489ee92be03afb51705938255707cb333e6e60b1d168ff0a069"} Dec 05 13:30:57 crc kubenswrapper[4784]: I1205 13:30:57.446354 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nhjcq" event={"ID":"8792f5f4-a75c-49de-8964-ab829f8dfe59","Type":"ContainerStarted","Data":"00905bff606947e861bdab4f7a0dbc6e230153a4623b5b054072471b1c0a377a"} Dec 05 13:30:57 crc kubenswrapper[4784]: I1205 13:30:57.469879 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-nhjcq" podStartSLOduration=3.045617416 podStartE2EDuration="5.46985808s" podCreationTimestamp="2025-12-05 13:30:52 +0000 UTC" firstStartedPulling="2025-12-05 13:30:54.411510194 +0000 UTC m=+3933.831577009" lastFinishedPulling="2025-12-05 13:30:56.835750858 +0000 UTC m=+3936.255817673" 
observedRunningTime="2025-12-05 13:30:57.46278804 +0000 UTC m=+3936.882854855" watchObservedRunningTime="2025-12-05 13:30:57.46985808 +0000 UTC m=+3936.889924895" Dec 05 13:31:03 crc kubenswrapper[4784]: I1205 13:31:03.051247 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-nhjcq" Dec 05 13:31:03 crc kubenswrapper[4784]: I1205 13:31:03.051958 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-nhjcq" Dec 05 13:31:03 crc kubenswrapper[4784]: I1205 13:31:03.121578 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-nhjcq" Dec 05 13:31:03 crc kubenswrapper[4784]: I1205 13:31:03.543031 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-nhjcq" Dec 05 13:31:03 crc kubenswrapper[4784]: I1205 13:31:03.590956 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-nhjcq"] Dec 05 13:31:05 crc kubenswrapper[4784]: I1205 13:31:05.516749 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-nhjcq" podUID="8792f5f4-a75c-49de-8964-ab829f8dfe59" containerName="registry-server" containerID="cri-o://00905bff606947e861bdab4f7a0dbc6e230153a4623b5b054072471b1c0a377a" gracePeriod=2 Dec 05 13:31:06 crc kubenswrapper[4784]: I1205 13:31:06.147721 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nhjcq" Dec 05 13:31:06 crc kubenswrapper[4784]: I1205 13:31:06.333018 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9rr7x\" (UniqueName: \"kubernetes.io/projected/8792f5f4-a75c-49de-8964-ab829f8dfe59-kube-api-access-9rr7x\") pod \"8792f5f4-a75c-49de-8964-ab829f8dfe59\" (UID: \"8792f5f4-a75c-49de-8964-ab829f8dfe59\") " Dec 05 13:31:06 crc kubenswrapper[4784]: I1205 13:31:06.333609 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8792f5f4-a75c-49de-8964-ab829f8dfe59-catalog-content\") pod \"8792f5f4-a75c-49de-8964-ab829f8dfe59\" (UID: \"8792f5f4-a75c-49de-8964-ab829f8dfe59\") " Dec 05 13:31:06 crc kubenswrapper[4784]: I1205 13:31:06.333786 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8792f5f4-a75c-49de-8964-ab829f8dfe59-utilities\") pod \"8792f5f4-a75c-49de-8964-ab829f8dfe59\" (UID: \"8792f5f4-a75c-49de-8964-ab829f8dfe59\") " Dec 05 13:31:06 crc kubenswrapper[4784]: I1205 13:31:06.335069 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8792f5f4-a75c-49de-8964-ab829f8dfe59-utilities" (OuterVolumeSpecName: "utilities") pod "8792f5f4-a75c-49de-8964-ab829f8dfe59" (UID: "8792f5f4-a75c-49de-8964-ab829f8dfe59"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:31:06 crc kubenswrapper[4784]: I1205 13:31:06.340705 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8792f5f4-a75c-49de-8964-ab829f8dfe59-kube-api-access-9rr7x" (OuterVolumeSpecName: "kube-api-access-9rr7x") pod "8792f5f4-a75c-49de-8964-ab829f8dfe59" (UID: "8792f5f4-a75c-49de-8964-ab829f8dfe59"). 
InnerVolumeSpecName "kube-api-access-9rr7x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:31:06 crc kubenswrapper[4784]: I1205 13:31:06.381446 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8792f5f4-a75c-49de-8964-ab829f8dfe59-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8792f5f4-a75c-49de-8964-ab829f8dfe59" (UID: "8792f5f4-a75c-49de-8964-ab829f8dfe59"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:31:06 crc kubenswrapper[4784]: I1205 13:31:06.437717 4784 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8792f5f4-a75c-49de-8964-ab829f8dfe59-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 13:31:06 crc kubenswrapper[4784]: I1205 13:31:06.438114 4784 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8792f5f4-a75c-49de-8964-ab829f8dfe59-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 13:31:06 crc kubenswrapper[4784]: I1205 13:31:06.438177 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9rr7x\" (UniqueName: \"kubernetes.io/projected/8792f5f4-a75c-49de-8964-ab829f8dfe59-kube-api-access-9rr7x\") on node \"crc\" DevicePath \"\"" Dec 05 13:31:06 crc kubenswrapper[4784]: I1205 13:31:06.534629 4784 generic.go:334] "Generic (PLEG): container finished" podID="8792f5f4-a75c-49de-8964-ab829f8dfe59" containerID="00905bff606947e861bdab4f7a0dbc6e230153a4623b5b054072471b1c0a377a" exitCode=0 Dec 05 13:31:06 crc kubenswrapper[4784]: I1205 13:31:06.534667 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nhjcq" Dec 05 13:31:06 crc kubenswrapper[4784]: I1205 13:31:06.534687 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nhjcq" event={"ID":"8792f5f4-a75c-49de-8964-ab829f8dfe59","Type":"ContainerDied","Data":"00905bff606947e861bdab4f7a0dbc6e230153a4623b5b054072471b1c0a377a"} Dec 05 13:31:06 crc kubenswrapper[4784]: I1205 13:31:06.534819 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nhjcq" event={"ID":"8792f5f4-a75c-49de-8964-ab829f8dfe59","Type":"ContainerDied","Data":"4b6e14c93d296a532bfda6ecb2e671df5e5057211a263f23fe351709f4543013"} Dec 05 13:31:06 crc kubenswrapper[4784]: I1205 13:31:06.534891 4784 scope.go:117] "RemoveContainer" containerID="00905bff606947e861bdab4f7a0dbc6e230153a4623b5b054072471b1c0a377a" Dec 05 13:31:06 crc kubenswrapper[4784]: I1205 13:31:06.572788 4784 scope.go:117] "RemoveContainer" containerID="66e4e225116dd489ee92be03afb51705938255707cb333e6e60b1d168ff0a069" Dec 05 13:31:06 crc kubenswrapper[4784]: I1205 13:31:06.616853 4784 scope.go:117] "RemoveContainer" containerID="fdedf3f634cc3bd136e609b869569a681e8057374944355d66b60e8552093331" Dec 05 13:31:06 crc kubenswrapper[4784]: I1205 13:31:06.619385 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-nhjcq"] Dec 05 13:31:06 crc kubenswrapper[4784]: I1205 13:31:06.627347 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-nhjcq"] Dec 05 13:31:06 crc kubenswrapper[4784]: I1205 13:31:06.673064 4784 scope.go:117] "RemoveContainer" containerID="00905bff606947e861bdab4f7a0dbc6e230153a4623b5b054072471b1c0a377a" Dec 05 13:31:06 crc 
kubenswrapper[4784]: E1205 13:31:06.673705 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"00905bff606947e861bdab4f7a0dbc6e230153a4623b5b054072471b1c0a377a\": container with ID starting with 00905bff606947e861bdab4f7a0dbc6e230153a4623b5b054072471b1c0a377a not found: ID does not exist" containerID="00905bff606947e861bdab4f7a0dbc6e230153a4623b5b054072471b1c0a377a" Dec 05 13:31:06 crc kubenswrapper[4784]: I1205 13:31:06.673752 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00905bff606947e861bdab4f7a0dbc6e230153a4623b5b054072471b1c0a377a"} err="failed to get container status \"00905bff606947e861bdab4f7a0dbc6e230153a4623b5b054072471b1c0a377a\": rpc error: code = NotFound desc = could not find container \"00905bff606947e861bdab4f7a0dbc6e230153a4623b5b054072471b1c0a377a\": container with ID starting with 00905bff606947e861bdab4f7a0dbc6e230153a4623b5b054072471b1c0a377a not found: ID does not exist" Dec 05 13:31:06 crc kubenswrapper[4784]: I1205 13:31:06.673783 4784 scope.go:117] "RemoveContainer" containerID="66e4e225116dd489ee92be03afb51705938255707cb333e6e60b1d168ff0a069" Dec 05 13:31:06 crc kubenswrapper[4784]: E1205 13:31:06.674275 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"66e4e225116dd489ee92be03afb51705938255707cb333e6e60b1d168ff0a069\": container with ID starting with 66e4e225116dd489ee92be03afb51705938255707cb333e6e60b1d168ff0a069 not found: ID does not exist" containerID="66e4e225116dd489ee92be03afb51705938255707cb333e6e60b1d168ff0a069" Dec 05 13:31:06 crc kubenswrapper[4784]: I1205 13:31:06.674303 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"66e4e225116dd489ee92be03afb51705938255707cb333e6e60b1d168ff0a069"} err="failed to get container status \"66e4e225116dd489ee92be03afb51705938255707cb333e6e60b1d168ff0a069\": rpc error: code = NotFound desc = could not find container \"66e4e225116dd489ee92be03afb51705938255707cb333e6e60b1d168ff0a069\": container with ID starting with 66e4e225116dd489ee92be03afb51705938255707cb333e6e60b1d168ff0a069 not found: ID does not exist" Dec 05 13:31:06 crc kubenswrapper[4784]: I1205 13:31:06.674320 4784 scope.go:117] "RemoveContainer" containerID="fdedf3f634cc3bd136e609b869569a681e8057374944355d66b60e8552093331" Dec 05 13:31:06 crc kubenswrapper[4784]: E1205 13:31:06.674851 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fdedf3f634cc3bd136e609b869569a681e8057374944355d66b60e8552093331\": container with ID starting with fdedf3f634cc3bd136e609b869569a681e8057374944355d66b60e8552093331 not found: ID does not exist" containerID="fdedf3f634cc3bd136e609b869569a681e8057374944355d66b60e8552093331" Dec 05 13:31:06 crc kubenswrapper[4784]: I1205 13:31:06.674880 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fdedf3f634cc3bd136e609b869569a681e8057374944355d66b60e8552093331"} err="failed to get container status \"fdedf3f634cc3bd136e609b869569a681e8057374944355d66b60e8552093331\": rpc error: code = NotFound desc = could not find container \"fdedf3f634cc3bd136e609b869569a681e8057374944355d66b60e8552093331\": container with ID starting with fdedf3f634cc3bd136e609b869569a681e8057374944355d66b60e8552093331 not found: ID does not exist" Dec 05 13:31:07 crc kubenswrapper[4784]: 
I1205 13:31:07.015621 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8792f5f4-a75c-49de-8964-ab829f8dfe59" path="/var/lib/kubelet/pods/8792f5f4-a75c-49de-8964-ab829f8dfe59/volumes" Dec 05 13:31:54 crc kubenswrapper[4784]: I1205 13:31:54.526276 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-qrv9p"] Dec 05 13:31:54 crc kubenswrapper[4784]: E1205 13:31:54.527734 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8792f5f4-a75c-49de-8964-ab829f8dfe59" containerName="extract-content" Dec 05 13:31:54 crc kubenswrapper[4784]: I1205 13:31:54.527759 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="8792f5f4-a75c-49de-8964-ab829f8dfe59" containerName="extract-content" Dec 05 13:31:54 crc kubenswrapper[4784]: E1205 13:31:54.527777 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8792f5f4-a75c-49de-8964-ab829f8dfe59" containerName="registry-server" Dec 05 13:31:54 crc kubenswrapper[4784]: I1205 13:31:54.527790 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="8792f5f4-a75c-49de-8964-ab829f8dfe59" containerName="registry-server" Dec 05 13:31:54 crc kubenswrapper[4784]: E1205 13:31:54.527836 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8792f5f4-a75c-49de-8964-ab829f8dfe59" containerName="extract-utilities" Dec 05 13:31:54 crc kubenswrapper[4784]: I1205 13:31:54.527851 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="8792f5f4-a75c-49de-8964-ab829f8dfe59" containerName="extract-utilities" Dec 05 13:31:54 crc kubenswrapper[4784]: I1205 13:31:54.528284 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="8792f5f4-a75c-49de-8964-ab829f8dfe59" containerName="registry-server" Dec 05 13:31:54 crc kubenswrapper[4784]: I1205 13:31:54.532336 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qrv9p" Dec 05 13:31:54 crc kubenswrapper[4784]: I1205 13:31:54.592591 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qrv9p"] Dec 05 13:31:54 crc kubenswrapper[4784]: I1205 13:31:54.640218 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9j2zq\" (UniqueName: \"kubernetes.io/projected/f8a44836-cd70-4225-9c32-8dd141474d15-kube-api-access-9j2zq\") pod \"redhat-marketplace-qrv9p\" (UID: \"f8a44836-cd70-4225-9c32-8dd141474d15\") " pod="openshift-marketplace/redhat-marketplace-qrv9p" Dec 05 13:31:54 crc kubenswrapper[4784]: I1205 13:31:54.640306 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8a44836-cd70-4225-9c32-8dd141474d15-utilities\") pod \"redhat-marketplace-qrv9p\" (UID: \"f8a44836-cd70-4225-9c32-8dd141474d15\") " pod="openshift-marketplace/redhat-marketplace-qrv9p" Dec 05 13:31:54 crc kubenswrapper[4784]: I1205 13:31:54.640428 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8a44836-cd70-4225-9c32-8dd141474d15-catalog-content\") pod \"redhat-marketplace-qrv9p\" (UID: \"f8a44836-cd70-4225-9c32-8dd141474d15\") " pod="openshift-marketplace/redhat-marketplace-qrv9p" Dec 05 13:31:54 crc kubenswrapper[4784]: I1205 13:31:54.742521 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8a44836-cd70-4225-9c32-8dd141474d15-utilities\") pod \"redhat-marketplace-qrv9p\" (UID: \"f8a44836-cd70-4225-9c32-8dd141474d15\") " pod="openshift-marketplace/redhat-marketplace-qrv9p" Dec 05 13:31:54 crc kubenswrapper[4784]: I1205 13:31:54.742887 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8a44836-cd70-4225-9c32-8dd141474d15-catalog-content\") pod \"redhat-marketplace-qrv9p\" (UID: \"f8a44836-cd70-4225-9c32-8dd141474d15\") " pod="openshift-marketplace/redhat-marketplace-qrv9p" Dec 05 13:31:54 crc kubenswrapper[4784]: I1205 13:31:54.743047 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9j2zq\" (UniqueName: \"kubernetes.io/projected/f8a44836-cd70-4225-9c32-8dd141474d15-kube-api-access-9j2zq\") pod \"redhat-marketplace-qrv9p\" (UID: \"f8a44836-cd70-4225-9c32-8dd141474d15\") " pod="openshift-marketplace/redhat-marketplace-qrv9p" Dec 05 13:31:54 crc kubenswrapper[4784]: I1205 13:31:54.743385 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8a44836-cd70-4225-9c32-8dd141474d15-catalog-content\") pod \"redhat-marketplace-qrv9p\" (UID: \"f8a44836-cd70-4225-9c32-8dd141474d15\") " pod="openshift-marketplace/redhat-marketplace-qrv9p" Dec 05 13:31:54 crc kubenswrapper[4784]: I1205 13:31:54.743385 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8a44836-cd70-4225-9c32-8dd141474d15-utilities\") pod \"redhat-marketplace-qrv9p\" (UID: \"f8a44836-cd70-4225-9c32-8dd141474d15\") " pod="openshift-marketplace/redhat-marketplace-qrv9p" Dec 05 13:31:55 crc kubenswrapper[4784]: I1205 13:31:55.190627 4784 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-9j2zq\" (UniqueName: \"kubernetes.io/projected/f8a44836-cd70-4225-9c32-8dd141474d15-kube-api-access-9j2zq\") pod \"redhat-marketplace-qrv9p\" (UID: \"f8a44836-cd70-4225-9c32-8dd141474d15\") " pod="openshift-marketplace/redhat-marketplace-qrv9p" Dec 05 13:31:55 crc kubenswrapper[4784]: I1205 13:31:55.474917 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qrv9p" Dec 05 13:31:56 crc kubenswrapper[4784]: I1205 13:31:56.013524 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qrv9p"] Dec 05 13:31:56 crc kubenswrapper[4784]: I1205 13:31:56.122249 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qrv9p" event={"ID":"f8a44836-cd70-4225-9c32-8dd141474d15","Type":"ContainerStarted","Data":"6e4ce8be919b6ed971fec50d6e3717f110fcef9c70b1d882b9e54f218d7ed8a1"} Dec 05 13:31:57 crc kubenswrapper[4784]: I1205 13:31:57.133983 4784 generic.go:334] "Generic (PLEG): container finished" podID="f8a44836-cd70-4225-9c32-8dd141474d15" containerID="59f23fa1354a4668967ec8a4bc24720808eeae90d1f2c14d5ab9df88042c11b6" exitCode=0 Dec 05 13:31:57 crc kubenswrapper[4784]: I1205 13:31:57.134074 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qrv9p" event={"ID":"f8a44836-cd70-4225-9c32-8dd141474d15","Type":"ContainerDied","Data":"59f23fa1354a4668967ec8a4bc24720808eeae90d1f2c14d5ab9df88042c11b6"} Dec 05 13:31:59 crc kubenswrapper[4784]: I1205 13:31:59.171219 4784 generic.go:334] "Generic (PLEG): container finished" podID="f8a44836-cd70-4225-9c32-8dd141474d15" containerID="148c07963647b8274b70dd1b35a350795846f9ea0b408cae504c904e0bea9f9d" exitCode=0 Dec 05 13:31:59 crc kubenswrapper[4784]: I1205 13:31:59.172076 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qrv9p" event={"ID":"f8a44836-cd70-4225-9c32-8dd141474d15","Type":"ContainerDied","Data":"148c07963647b8274b70dd1b35a350795846f9ea0b408cae504c904e0bea9f9d"} Dec 05 13:31:59 crc kubenswrapper[4784]: E1205 13:31:59.302656 4784 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf8a44836_cd70_4225_9c32_8dd141474d15.slice/crio-148c07963647b8274b70dd1b35a350795846f9ea0b408cae504c904e0bea9f9d.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf8a44836_cd70_4225_9c32_8dd141474d15.slice/crio-conmon-148c07963647b8274b70dd1b35a350795846f9ea0b408cae504c904e0bea9f9d.scope\": RecentStats: unable to find data in memory cache]" Dec 05 13:32:01 crc kubenswrapper[4784]: I1205 13:32:01.194932 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qrv9p" event={"ID":"f8a44836-cd70-4225-9c32-8dd141474d15","Type":"ContainerStarted","Data":"d97ae5446781276b586e345fb0396808a148951e4b18fb5af465f2461eabd620"} Dec 05 13:32:01 crc kubenswrapper[4784]: I1205 13:32:01.233361 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-qrv9p" podStartSLOduration=4.391896728 podStartE2EDuration="7.233335419s" podCreationTimestamp="2025-12-05 13:31:54 +0000 UTC" firstStartedPulling="2025-12-05 13:31:57.136434311 +0000 UTC m=+3996.556501166" lastFinishedPulling="2025-12-05 13:31:59.977873042 +0000 UTC 
m=+3999.397939857" observedRunningTime="2025-12-05 13:32:01.216367691 +0000 UTC m=+4000.636434506" watchObservedRunningTime="2025-12-05 13:32:01.233335419 +0000 UTC m=+4000.653402244" Dec 05 13:32:05 crc kubenswrapper[4784]: I1205 13:32:05.476070 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-qrv9p" Dec 05 13:32:05 crc kubenswrapper[4784]: I1205 13:32:05.476866 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-qrv9p" Dec 05 13:32:05 crc kubenswrapper[4784]: I1205 13:32:05.532055 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-qrv9p" Dec 05 13:32:06 crc kubenswrapper[4784]: I1205 13:32:06.838153 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-qrv9p" Dec 05 13:32:06 crc kubenswrapper[4784]: I1205 13:32:06.891528 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qrv9p"] Dec 05 13:32:08 crc kubenswrapper[4784]: I1205 13:32:08.278018 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-qrv9p" podUID="f8a44836-cd70-4225-9c32-8dd141474d15" containerName="registry-server" containerID="cri-o://d97ae5446781276b586e345fb0396808a148951e4b18fb5af465f2461eabd620" gracePeriod=2 Dec 05 13:32:08 crc kubenswrapper[4784]: I1205 13:32:08.827829 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qrv9p" Dec 05 13:32:08 crc kubenswrapper[4784]: I1205 13:32:08.979599 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9j2zq\" (UniqueName: \"kubernetes.io/projected/f8a44836-cd70-4225-9c32-8dd141474d15-kube-api-access-9j2zq\") pod \"f8a44836-cd70-4225-9c32-8dd141474d15\" (UID: \"f8a44836-cd70-4225-9c32-8dd141474d15\") " Dec 05 13:32:08 crc kubenswrapper[4784]: I1205 13:32:08.979757 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8a44836-cd70-4225-9c32-8dd141474d15-utilities\") pod \"f8a44836-cd70-4225-9c32-8dd141474d15\" (UID: \"f8a44836-cd70-4225-9c32-8dd141474d15\") " Dec 05 13:32:08 crc kubenswrapper[4784]: I1205 13:32:08.979877 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8a44836-cd70-4225-9c32-8dd141474d15-catalog-content\") pod \"f8a44836-cd70-4225-9c32-8dd141474d15\" (UID: \"f8a44836-cd70-4225-9c32-8dd141474d15\") " Dec 05 13:32:08 crc kubenswrapper[4784]: I1205 13:32:08.980592 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f8a44836-cd70-4225-9c32-8dd141474d15-utilities" (OuterVolumeSpecName: "utilities") pod "f8a44836-cd70-4225-9c32-8dd141474d15" (UID: "f8a44836-cd70-4225-9c32-8dd141474d15"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:32:09 crc kubenswrapper[4784]: I1205 13:32:08.997427 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8a44836-cd70-4225-9c32-8dd141474d15-kube-api-access-9j2zq" (OuterVolumeSpecName: "kube-api-access-9j2zq") pod "f8a44836-cd70-4225-9c32-8dd141474d15" (UID: "f8a44836-cd70-4225-9c32-8dd141474d15"). 
InnerVolumeSpecName "kube-api-access-9j2zq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:32:09 crc kubenswrapper[4784]: I1205 13:32:09.013883 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f8a44836-cd70-4225-9c32-8dd141474d15-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f8a44836-cd70-4225-9c32-8dd141474d15" (UID: "f8a44836-cd70-4225-9c32-8dd141474d15"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:32:09 crc kubenswrapper[4784]: I1205 13:32:09.082263 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9j2zq\" (UniqueName: \"kubernetes.io/projected/f8a44836-cd70-4225-9c32-8dd141474d15-kube-api-access-9j2zq\") on node \"crc\" DevicePath \"\"" Dec 05 13:32:09 crc kubenswrapper[4784]: I1205 13:32:09.082293 4784 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8a44836-cd70-4225-9c32-8dd141474d15-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 13:32:09 crc kubenswrapper[4784]: I1205 13:32:09.082304 4784 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8a44836-cd70-4225-9c32-8dd141474d15-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 13:32:09 crc kubenswrapper[4784]: I1205 13:32:09.291151 4784 generic.go:334] "Generic (PLEG): container finished" podID="f8a44836-cd70-4225-9c32-8dd141474d15" containerID="d97ae5446781276b586e345fb0396808a148951e4b18fb5af465f2461eabd620" exitCode=0 Dec 05 13:32:09 crc kubenswrapper[4784]: I1205 13:32:09.291225 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qrv9p" event={"ID":"f8a44836-cd70-4225-9c32-8dd141474d15","Type":"ContainerDied","Data":"d97ae5446781276b586e345fb0396808a148951e4b18fb5af465f2461eabd620"} Dec 05 13:32:09 crc kubenswrapper[4784]: I1205 13:32:09.291266 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qrv9p" event={"ID":"f8a44836-cd70-4225-9c32-8dd141474d15","Type":"ContainerDied","Data":"6e4ce8be919b6ed971fec50d6e3717f110fcef9c70b1d882b9e54f218d7ed8a1"} Dec 05 13:32:09 crc kubenswrapper[4784]: I1205 13:32:09.291294 4784 scope.go:117] "RemoveContainer" containerID="d97ae5446781276b586e345fb0396808a148951e4b18fb5af465f2461eabd620" Dec 05 13:32:09 crc kubenswrapper[4784]: I1205 13:32:09.291455 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qrv9p" Dec 05 13:32:09 crc kubenswrapper[4784]: I1205 13:32:09.321992 4784 scope.go:117] "RemoveContainer" containerID="148c07963647b8274b70dd1b35a350795846f9ea0b408cae504c904e0bea9f9d" Dec 05 13:32:09 crc kubenswrapper[4784]: I1205 13:32:09.346359 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qrv9p"] Dec 05 13:32:09 crc kubenswrapper[4784]: I1205 13:32:09.356934 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-qrv9p"] Dec 05 13:32:09 crc kubenswrapper[4784]: I1205 13:32:09.368513 4784 scope.go:117] "RemoveContainer" containerID="59f23fa1354a4668967ec8a4bc24720808eeae90d1f2c14d5ab9df88042c11b6" Dec 05 13:32:09 crc kubenswrapper[4784]: I1205 13:32:09.408354 4784 scope.go:117] "RemoveContainer" containerID="d97ae5446781276b586e345fb0396808a148951e4b18fb5af465f2461eabd620" Dec 05 13:32:09 crc kubenswrapper[4784]: E1205 13:32:09.409026 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d97ae5446781276b586e345fb0396808a148951e4b18fb5af465f2461eabd620\": container with ID starting with d97ae5446781276b586e345fb0396808a148951e4b18fb5af465f2461eabd620 not found: ID does not exist" containerID="d97ae5446781276b586e345fb0396808a148951e4b18fb5af465f2461eabd620" Dec 05 13:32:09 crc kubenswrapper[4784]: I1205 13:32:09.409086 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d97ae5446781276b586e345fb0396808a148951e4b18fb5af465f2461eabd620"} err="failed to get container status \"d97ae5446781276b586e345fb0396808a148951e4b18fb5af465f2461eabd620\": rpc error: code = NotFound desc = could not find container \"d97ae5446781276b586e345fb0396808a148951e4b18fb5af465f2461eabd620\": container with ID starting with d97ae5446781276b586e345fb0396808a148951e4b18fb5af465f2461eabd620 not found: ID does not exist" Dec 05 13:32:09 crc kubenswrapper[4784]: I1205 13:32:09.409119 4784 scope.go:117] "RemoveContainer" containerID="148c07963647b8274b70dd1b35a350795846f9ea0b408cae504c904e0bea9f9d" Dec 05 13:32:09 crc kubenswrapper[4784]: E1205 13:32:09.409775 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"148c07963647b8274b70dd1b35a350795846f9ea0b408cae504c904e0bea9f9d\": container with ID starting with 148c07963647b8274b70dd1b35a350795846f9ea0b408cae504c904e0bea9f9d not found: ID does not exist" containerID="148c07963647b8274b70dd1b35a350795846f9ea0b408cae504c904e0bea9f9d" Dec 05 13:32:09 crc kubenswrapper[4784]: I1205 13:32:09.409809 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"148c07963647b8274b70dd1b35a350795846f9ea0b408cae504c904e0bea9f9d"} err="failed to get container status \"148c07963647b8274b70dd1b35a350795846f9ea0b408cae504c904e0bea9f9d\": rpc error: code = NotFound desc = could not find container \"148c07963647b8274b70dd1b35a350795846f9ea0b408cae504c904e0bea9f9d\": container with ID starting with 148c07963647b8274b70dd1b35a350795846f9ea0b408cae504c904e0bea9f9d not found: ID does not exist" Dec 05 13:32:09 crc kubenswrapper[4784]: I1205 13:32:09.409828 4784 scope.go:117] "RemoveContainer" containerID="59f23fa1354a4668967ec8a4bc24720808eeae90d1f2c14d5ab9df88042c11b6" Dec 05 13:32:09 crc kubenswrapper[4784]: E1205 13:32:09.410119 4784 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"59f23fa1354a4668967ec8a4bc24720808eeae90d1f2c14d5ab9df88042c11b6\": container with ID starting with 59f23fa1354a4668967ec8a4bc24720808eeae90d1f2c14d5ab9df88042c11b6 not found: ID does not exist" containerID="59f23fa1354a4668967ec8a4bc24720808eeae90d1f2c14d5ab9df88042c11b6" Dec 05 13:32:09 crc kubenswrapper[4784]: I1205 13:32:09.410248 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59f23fa1354a4668967ec8a4bc24720808eeae90d1f2c14d5ab9df88042c11b6"} err="failed to get container status \"59f23fa1354a4668967ec8a4bc24720808eeae90d1f2c14d5ab9df88042c11b6\": rpc error: code = NotFound desc = could not find container \"59f23fa1354a4668967ec8a4bc24720808eeae90d1f2c14d5ab9df88042c11b6\": container with ID starting with 59f23fa1354a4668967ec8a4bc24720808eeae90d1f2c14d5ab9df88042c11b6 not found: ID does not exist" Dec 05 13:32:11 crc kubenswrapper[4784]: I1205 13:32:11.021400 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f8a44836-cd70-4225-9c32-8dd141474d15" path="/var/lib/kubelet/pods/f8a44836-cd70-4225-9c32-8dd141474d15/volumes" Dec 05 13:32:59 crc kubenswrapper[4784]: I1205 13:32:59.573256 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 13:32:59 crc kubenswrapper[4784]: I1205 13:32:59.574013 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 13:33:29 crc kubenswrapper[4784]: I1205 13:33:29.572378 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 13:33:29 crc kubenswrapper[4784]: I1205 13:33:29.572970 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 13:33:59 crc kubenswrapper[4784]: I1205 13:33:59.572278 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 13:33:59 crc kubenswrapper[4784]: I1205 13:33:59.572927 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 13:33:59 crc kubenswrapper[4784]: I1205 13:33:59.572984 4784 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" Dec 05 13:33:59 crc kubenswrapper[4784]: I1205 13:33:59.573885 4784 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"738ead2c18af5b64d1b1020bddae849226830b50bb2f086570a0ebf27e5d0d00"} pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 13:33:59 crc kubenswrapper[4784]: I1205 13:33:59.573954 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" containerID="cri-o://738ead2c18af5b64d1b1020bddae849226830b50bb2f086570a0ebf27e5d0d00" gracePeriod=600 Dec 05 13:34:00 crc kubenswrapper[4784]: I1205 13:34:00.517770 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerDied","Data":"738ead2c18af5b64d1b1020bddae849226830b50bb2f086570a0ebf27e5d0d00"} Dec 05 13:34:00 crc kubenswrapper[4784]: I1205 13:34:00.518371 4784 scope.go:117] "RemoveContainer" containerID="62a6a42d74c4a846333978c8a37da55ff1fd9eead23db76f71b5c7cee7012f21" Dec 05 13:34:00 crc kubenswrapper[4784]: I1205 13:34:00.517734 4784 generic.go:334] "Generic (PLEG): container finished" podID="be412f31-7a36-4811-8914-be8cdc987d08" containerID="738ead2c18af5b64d1b1020bddae849226830b50bb2f086570a0ebf27e5d0d00" exitCode=0 Dec 05 13:34:00 crc kubenswrapper[4784]: I1205 13:34:00.518468 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerStarted","Data":"17364ba8ca6f9c9e7dab8082eb826a2beef6b231c3f26dfd80166ed3425508a1"} Dec 05 13:35:59 crc kubenswrapper[4784]: I1205 13:35:59.572279 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 13:35:59 crc kubenswrapper[4784]: I1205 13:35:59.572884 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 13:36:29 crc kubenswrapper[4784]: I1205 13:36:29.572247 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 13:36:29 crc kubenswrapper[4784]: I1205 13:36:29.572788 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 13:36:59 crc 
Dec 05 13:36:59 crc kubenswrapper[4784]: I1205 13:36:59.572306 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 13:36:59 crc kubenswrapper[4784]: I1205 13:36:59.573018 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 13:36:59 crc kubenswrapper[4784]: I1205 13:36:59.573086 4784 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm"
Dec 05 13:36:59 crc kubenswrapper[4784]: I1205 13:36:59.574325 4784 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"17364ba8ca6f9c9e7dab8082eb826a2beef6b231c3f26dfd80166ed3425508a1"} pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 13:36:59 crc kubenswrapper[4784]: I1205 13:36:59.574429 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" containerID="cri-o://17364ba8ca6f9c9e7dab8082eb826a2beef6b231c3f26dfd80166ed3425508a1" gracePeriod=600
Dec 05 13:36:59 crc kubenswrapper[4784]: E1205 13:36:59.693874 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 13:37:00 crc kubenswrapper[4784]: I1205 13:37:00.448542 4784 generic.go:334] "Generic (PLEG): container finished" podID="be412f31-7a36-4811-8914-be8cdc987d08" containerID="17364ba8ca6f9c9e7dab8082eb826a2beef6b231c3f26dfd80166ed3425508a1" exitCode=0
Dec 05 13:37:00 crc kubenswrapper[4784]: I1205 13:37:00.448593 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerDied","Data":"17364ba8ca6f9c9e7dab8082eb826a2beef6b231c3f26dfd80166ed3425508a1"}
Dec 05 13:37:00 crc kubenswrapper[4784]: I1205 13:37:00.448630 4784 scope.go:117] "RemoveContainer" containerID="738ead2c18af5b64d1b1020bddae849226830b50bb2f086570a0ebf27e5d0d00"
Dec 05 13:37:00 crc kubenswrapper[4784]: I1205 13:37:00.449365 4784 scope.go:117] "RemoveContainer" containerID="17364ba8ca6f9c9e7dab8082eb826a2beef6b231c3f26dfd80166ed3425508a1"
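The failures above recur at 30-second intervals and the container is only killed after the third consecutive failure, consistent with an HTTP liveness probe against 127.0.0.1:8798/health with the default failureThreshold of 3 (an assumption read off the cadence, not from the pod spec). A minimal Go sketch of one such check (probeOnce is a hypothetical helper; the kubelet's own prober is more elaborate, but an HTTP GET where transport errors and non-2xx/3xx statuses count as failure is the core of it):

package main

import (
	"fmt"
	"net/http"
	"time"
)

// probeOnce performs a single HTTP-GET liveness check, roughly what the
// prober above does against http://127.0.0.1:8798/health. A transport error
// (such as "connection refused") or a status outside 200-399 is a failure.
func probeOnce(url string, timeout time.Duration) error {
	client := &http.Client{Timeout: timeout}
	resp, err := client.Get(url)
	if err != nil {
		return err // e.g. dial tcp 127.0.0.1:8798: connect: connection refused
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		return fmt.Errorf("unhealthy: HTTP %d", resp.StatusCode)
	}
	return nil
}

func main() {
	if err := probeOnce("http://127.0.0.1:8798/health", time.Second); err != nil {
		fmt.Println("liveness failure:", err)
	}
}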
Dec 05 13:37:00 crc kubenswrapper[4784]: E1205 13:37:00.449745 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 13:37:11 crc kubenswrapper[4784]: I1205 13:37:11.008925 4784 scope.go:117] "RemoveContainer" containerID="17364ba8ca6f9c9e7dab8082eb826a2beef6b231c3f26dfd80166ed3425508a1"
Dec 05 13:37:11 crc kubenswrapper[4784]: E1205 13:37:11.010347 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 13:37:23 crc kubenswrapper[4784]: I1205 13:37:23.000234 4784 scope.go:117] "RemoveContainer" containerID="17364ba8ca6f9c9e7dab8082eb826a2beef6b231c3f26dfd80166ed3425508a1"
Dec 05 13:37:23 crc kubenswrapper[4784]: E1205 13:37:23.001590 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 13:37:37 crc kubenswrapper[4784]: I1205 13:37:36.999881 4784 scope.go:117] "RemoveContainer" containerID="17364ba8ca6f9c9e7dab8082eb826a2beef6b231c3f26dfd80166ed3425508a1"
Dec 05 13:37:37 crc kubenswrapper[4784]: E1205 13:37:37.001688 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 13:37:45 crc kubenswrapper[4784]: I1205 13:37:45.796850 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-6mpfl"]
Dec 05 13:37:45 crc kubenswrapper[4784]: E1205 13:37:45.797840 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8a44836-cd70-4225-9c32-8dd141474d15" containerName="extract-utilities"
Dec 05 13:37:45 crc kubenswrapper[4784]: I1205 13:37:45.797852 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8a44836-cd70-4225-9c32-8dd141474d15" containerName="extract-utilities"
Dec 05 13:37:45 crc kubenswrapper[4784]: E1205 13:37:45.797870 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8a44836-cd70-4225-9c32-8dd141474d15" containerName="extract-content"
Dec 05 13:37:45 crc kubenswrapper[4784]: I1205 13:37:45.797876 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8a44836-cd70-4225-9c32-8dd141474d15" containerName="extract-content"
Dec 05 13:37:45 crc kubenswrapper[4784]: E1205 13:37:45.797887 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8a44836-cd70-4225-9c32-8dd141474d15" containerName="registry-server"
Dec 05 13:37:45 crc kubenswrapper[4784]: I1205 13:37:45.797893 4784 state_mem.go:107] "Deleted CPUSet assignment"
podUID="f8a44836-cd70-4225-9c32-8dd141474d15" containerName="registry-server" Dec 05 13:37:45 crc kubenswrapper[4784]: I1205 13:37:45.798089 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8a44836-cd70-4225-9c32-8dd141474d15" containerName="registry-server" Dec 05 13:37:45 crc kubenswrapper[4784]: I1205 13:37:45.799542 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6mpfl" Dec 05 13:37:45 crc kubenswrapper[4784]: I1205 13:37:45.808790 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6mpfl"] Dec 05 13:37:45 crc kubenswrapper[4784]: I1205 13:37:45.874055 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9n48w\" (UniqueName: \"kubernetes.io/projected/7368ecae-ebc1-4e96-955b-f5c52c860955-kube-api-access-9n48w\") pod \"redhat-operators-6mpfl\" (UID: \"7368ecae-ebc1-4e96-955b-f5c52c860955\") " pod="openshift-marketplace/redhat-operators-6mpfl" Dec 05 13:37:45 crc kubenswrapper[4784]: I1205 13:37:45.874170 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7368ecae-ebc1-4e96-955b-f5c52c860955-utilities\") pod \"redhat-operators-6mpfl\" (UID: \"7368ecae-ebc1-4e96-955b-f5c52c860955\") " pod="openshift-marketplace/redhat-operators-6mpfl" Dec 05 13:37:45 crc kubenswrapper[4784]: I1205 13:37:45.874311 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7368ecae-ebc1-4e96-955b-f5c52c860955-catalog-content\") pod \"redhat-operators-6mpfl\" (UID: \"7368ecae-ebc1-4e96-955b-f5c52c860955\") " pod="openshift-marketplace/redhat-operators-6mpfl" Dec 05 13:37:45 crc kubenswrapper[4784]: I1205 13:37:45.978037 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9n48w\" (UniqueName: \"kubernetes.io/projected/7368ecae-ebc1-4e96-955b-f5c52c860955-kube-api-access-9n48w\") pod \"redhat-operators-6mpfl\" (UID: \"7368ecae-ebc1-4e96-955b-f5c52c860955\") " pod="openshift-marketplace/redhat-operators-6mpfl" Dec 05 13:37:45 crc kubenswrapper[4784]: I1205 13:37:45.978237 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7368ecae-ebc1-4e96-955b-f5c52c860955-utilities\") pod \"redhat-operators-6mpfl\" (UID: \"7368ecae-ebc1-4e96-955b-f5c52c860955\") " pod="openshift-marketplace/redhat-operators-6mpfl" Dec 05 13:37:45 crc kubenswrapper[4784]: I1205 13:37:45.978267 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7368ecae-ebc1-4e96-955b-f5c52c860955-catalog-content\") pod \"redhat-operators-6mpfl\" (UID: \"7368ecae-ebc1-4e96-955b-f5c52c860955\") " pod="openshift-marketplace/redhat-operators-6mpfl" Dec 05 13:37:45 crc kubenswrapper[4784]: I1205 13:37:45.978837 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7368ecae-ebc1-4e96-955b-f5c52c860955-catalog-content\") pod \"redhat-operators-6mpfl\" (UID: \"7368ecae-ebc1-4e96-955b-f5c52c860955\") " pod="openshift-marketplace/redhat-operators-6mpfl" Dec 05 13:37:45 crc kubenswrapper[4784]: I1205 13:37:45.979032 4784 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7368ecae-ebc1-4e96-955b-f5c52c860955-utilities\") pod \"redhat-operators-6mpfl\" (UID: \"7368ecae-ebc1-4e96-955b-f5c52c860955\") " pod="openshift-marketplace/redhat-operators-6mpfl" Dec 05 13:37:46 crc kubenswrapper[4784]: I1205 13:37:46.000115 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9n48w\" (UniqueName: \"kubernetes.io/projected/7368ecae-ebc1-4e96-955b-f5c52c860955-kube-api-access-9n48w\") pod \"redhat-operators-6mpfl\" (UID: \"7368ecae-ebc1-4e96-955b-f5c52c860955\") " pod="openshift-marketplace/redhat-operators-6mpfl" Dec 05 13:37:46 crc kubenswrapper[4784]: I1205 13:37:46.117985 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6mpfl" Dec 05 13:37:46 crc kubenswrapper[4784]: I1205 13:37:46.641296 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6mpfl"] Dec 05 13:37:46 crc kubenswrapper[4784]: I1205 13:37:46.898625 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6mpfl" event={"ID":"7368ecae-ebc1-4e96-955b-f5c52c860955","Type":"ContainerStarted","Data":"c5621013a5f29de63059672fda5eaf531247626100c729af5fe97d572f77f271"} Dec 05 13:37:47 crc kubenswrapper[4784]: I1205 13:37:47.912079 4784 generic.go:334] "Generic (PLEG): container finished" podID="7368ecae-ebc1-4e96-955b-f5c52c860955" containerID="2f25971f185d00690f3d1d5ffe19ab35fd36b2d20ccccc33faa2a631432a2fb1" exitCode=0 Dec 05 13:37:47 crc kubenswrapper[4784]: I1205 13:37:47.912235 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6mpfl" event={"ID":"7368ecae-ebc1-4e96-955b-f5c52c860955","Type":"ContainerDied","Data":"2f25971f185d00690f3d1d5ffe19ab35fd36b2d20ccccc33faa2a631432a2fb1"} Dec 05 13:37:47 crc kubenswrapper[4784]: I1205 13:37:47.915091 4784 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 13:37:49 crc kubenswrapper[4784]: I1205 13:37:49.948306 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6mpfl" event={"ID":"7368ecae-ebc1-4e96-955b-f5c52c860955","Type":"ContainerStarted","Data":"35d11bc8fc22f6adf34cdd00dadc32d388b85ad785060aff53363bd5cbf26e35"} Dec 05 13:37:49 crc kubenswrapper[4784]: I1205 13:37:49.999887 4784 scope.go:117] "RemoveContainer" containerID="17364ba8ca6f9c9e7dab8082eb826a2beef6b231c3f26dfd80166ed3425508a1" Dec 05 13:37:50 crc kubenswrapper[4784]: E1205 13:37:50.000448 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:37:56 crc kubenswrapper[4784]: I1205 13:37:56.014458 4784 generic.go:334] "Generic (PLEG): container finished" podID="7368ecae-ebc1-4e96-955b-f5c52c860955" containerID="35d11bc8fc22f6adf34cdd00dadc32d388b85ad785060aff53363bd5cbf26e35" exitCode=0 Dec 05 13:37:56 crc kubenswrapper[4784]: I1205 13:37:56.014534 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6mpfl" 
event={"ID":"7368ecae-ebc1-4e96-955b-f5c52c860955","Type":"ContainerDied","Data":"35d11bc8fc22f6adf34cdd00dadc32d388b85ad785060aff53363bd5cbf26e35"} Dec 05 13:37:59 crc kubenswrapper[4784]: I1205 13:37:59.045918 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6mpfl" event={"ID":"7368ecae-ebc1-4e96-955b-f5c52c860955","Type":"ContainerStarted","Data":"3ddd4448d99f68d1a48330742db08edbd680ab4d566955c61001844ade43def6"} Dec 05 13:37:59 crc kubenswrapper[4784]: I1205 13:37:59.067209 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-6mpfl" podStartSLOduration=3.980458309 podStartE2EDuration="14.067174466s" podCreationTimestamp="2025-12-05 13:37:45 +0000 UTC" firstStartedPulling="2025-12-05 13:37:47.914824044 +0000 UTC m=+4347.334890859" lastFinishedPulling="2025-12-05 13:37:58.001540191 +0000 UTC m=+4357.421607016" observedRunningTime="2025-12-05 13:37:59.061058397 +0000 UTC m=+4358.481125212" watchObservedRunningTime="2025-12-05 13:37:59.067174466 +0000 UTC m=+4358.487241281" Dec 05 13:38:01 crc kubenswrapper[4784]: I1205 13:38:01.006780 4784 scope.go:117] "RemoveContainer" containerID="17364ba8ca6f9c9e7dab8082eb826a2beef6b231c3f26dfd80166ed3425508a1" Dec 05 13:38:01 crc kubenswrapper[4784]: E1205 13:38:01.007317 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:38:06 crc kubenswrapper[4784]: I1205 13:38:06.118210 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-6mpfl" Dec 05 13:38:06 crc kubenswrapper[4784]: I1205 13:38:06.119702 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-6mpfl" Dec 05 13:38:06 crc kubenswrapper[4784]: I1205 13:38:06.184996 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-6mpfl" Dec 05 13:38:07 crc kubenswrapper[4784]: I1205 13:38:07.215438 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-6mpfl" Dec 05 13:38:07 crc kubenswrapper[4784]: I1205 13:38:07.265747 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6mpfl"] Dec 05 13:38:09 crc kubenswrapper[4784]: I1205 13:38:09.189503 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-6mpfl" podUID="7368ecae-ebc1-4e96-955b-f5c52c860955" containerName="registry-server" containerID="cri-o://3ddd4448d99f68d1a48330742db08edbd680ab4d566955c61001844ade43def6" gracePeriod=2 Dec 05 13:38:10 crc kubenswrapper[4784]: I1205 13:38:10.201282 4784 generic.go:334] "Generic (PLEG): container finished" podID="7368ecae-ebc1-4e96-955b-f5c52c860955" containerID="3ddd4448d99f68d1a48330742db08edbd680ab4d566955c61001844ade43def6" exitCode=0 Dec 05 13:38:10 crc kubenswrapper[4784]: I1205 13:38:10.201358 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6mpfl" 
event={"ID":"7368ecae-ebc1-4e96-955b-f5c52c860955","Type":"ContainerDied","Data":"3ddd4448d99f68d1a48330742db08edbd680ab4d566955c61001844ade43def6"} Dec 05 13:38:10 crc kubenswrapper[4784]: I1205 13:38:10.596236 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6mpfl" Dec 05 13:38:10 crc kubenswrapper[4784]: I1205 13:38:10.761167 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9n48w\" (UniqueName: \"kubernetes.io/projected/7368ecae-ebc1-4e96-955b-f5c52c860955-kube-api-access-9n48w\") pod \"7368ecae-ebc1-4e96-955b-f5c52c860955\" (UID: \"7368ecae-ebc1-4e96-955b-f5c52c860955\") " Dec 05 13:38:10 crc kubenswrapper[4784]: I1205 13:38:10.762453 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7368ecae-ebc1-4e96-955b-f5c52c860955-utilities\") pod \"7368ecae-ebc1-4e96-955b-f5c52c860955\" (UID: \"7368ecae-ebc1-4e96-955b-f5c52c860955\") " Dec 05 13:38:10 crc kubenswrapper[4784]: I1205 13:38:10.762644 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7368ecae-ebc1-4e96-955b-f5c52c860955-catalog-content\") pod \"7368ecae-ebc1-4e96-955b-f5c52c860955\" (UID: \"7368ecae-ebc1-4e96-955b-f5c52c860955\") " Dec 05 13:38:10 crc kubenswrapper[4784]: I1205 13:38:10.763405 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7368ecae-ebc1-4e96-955b-f5c52c860955-utilities" (OuterVolumeSpecName: "utilities") pod "7368ecae-ebc1-4e96-955b-f5c52c860955" (UID: "7368ecae-ebc1-4e96-955b-f5c52c860955"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:38:10 crc kubenswrapper[4784]: I1205 13:38:10.763730 4784 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7368ecae-ebc1-4e96-955b-f5c52c860955-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 13:38:10 crc kubenswrapper[4784]: I1205 13:38:10.874282 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7368ecae-ebc1-4e96-955b-f5c52c860955-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7368ecae-ebc1-4e96-955b-f5c52c860955" (UID: "7368ecae-ebc1-4e96-955b-f5c52c860955"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:38:10 crc kubenswrapper[4784]: I1205 13:38:10.967648 4784 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7368ecae-ebc1-4e96-955b-f5c52c860955-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 13:38:11 crc kubenswrapper[4784]: I1205 13:38:11.189964 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7368ecae-ebc1-4e96-955b-f5c52c860955-kube-api-access-9n48w" (OuterVolumeSpecName: "kube-api-access-9n48w") pod "7368ecae-ebc1-4e96-955b-f5c52c860955" (UID: "7368ecae-ebc1-4e96-955b-f5c52c860955"). InnerVolumeSpecName "kube-api-access-9n48w". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:38:11 crc kubenswrapper[4784]: I1205 13:38:11.217791 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6mpfl" event={"ID":"7368ecae-ebc1-4e96-955b-f5c52c860955","Type":"ContainerDied","Data":"c5621013a5f29de63059672fda5eaf531247626100c729af5fe97d572f77f271"} Dec 05 13:38:11 crc kubenswrapper[4784]: I1205 13:38:11.218367 4784 scope.go:117] "RemoveContainer" containerID="3ddd4448d99f68d1a48330742db08edbd680ab4d566955c61001844ade43def6" Dec 05 13:38:11 crc kubenswrapper[4784]: I1205 13:38:11.217854 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6mpfl" Dec 05 13:38:11 crc kubenswrapper[4784]: I1205 13:38:11.273572 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9n48w\" (UniqueName: \"kubernetes.io/projected/7368ecae-ebc1-4e96-955b-f5c52c860955-kube-api-access-9n48w\") on node \"crc\" DevicePath \"\"" Dec 05 13:38:11 crc kubenswrapper[4784]: I1205 13:38:11.453666 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6mpfl"] Dec 05 13:38:11 crc kubenswrapper[4784]: I1205 13:38:11.457924 4784 scope.go:117] "RemoveContainer" containerID="35d11bc8fc22f6adf34cdd00dadc32d388b85ad785060aff53363bd5cbf26e35" Dec 05 13:38:11 crc kubenswrapper[4784]: I1205 13:38:11.465336 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-6mpfl"] Dec 05 13:38:11 crc kubenswrapper[4784]: I1205 13:38:11.482620 4784 scope.go:117] "RemoveContainer" containerID="2f25971f185d00690f3d1d5ffe19ab35fd36b2d20ccccc33faa2a631432a2fb1" Dec 05 13:38:13 crc kubenswrapper[4784]: I1205 13:38:13.009042 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7368ecae-ebc1-4e96-955b-f5c52c860955" path="/var/lib/kubelet/pods/7368ecae-ebc1-4e96-955b-f5c52c860955/volumes" Dec 05 13:38:14 crc kubenswrapper[4784]: I1205 13:38:14.001843 4784 scope.go:117] "RemoveContainer" containerID="17364ba8ca6f9c9e7dab8082eb826a2beef6b231c3f26dfd80166ed3425508a1" Dec 05 13:38:14 crc kubenswrapper[4784]: E1205 13:38:14.002919 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:38:26 crc kubenswrapper[4784]: I1205 13:38:25.999947 4784 scope.go:117] "RemoveContainer" containerID="17364ba8ca6f9c9e7dab8082eb826a2beef6b231c3f26dfd80166ed3425508a1" Dec 05 13:38:26 crc kubenswrapper[4784]: E1205 13:38:26.001241 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:38:37 crc kubenswrapper[4784]: I1205 13:38:36.999975 4784 scope.go:117] "RemoveContainer" containerID="17364ba8ca6f9c9e7dab8082eb826a2beef6b231c3f26dfd80166ed3425508a1" Dec 05 13:38:37 crc kubenswrapper[4784]: E1205 
13:38:37.001644 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:38:50 crc kubenswrapper[4784]: I1205 13:38:49.999577 4784 scope.go:117] "RemoveContainer" containerID="17364ba8ca6f9c9e7dab8082eb826a2beef6b231c3f26dfd80166ed3425508a1" Dec 05 13:38:50 crc kubenswrapper[4784]: E1205 13:38:50.001566 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:39:02 crc kubenswrapper[4784]: I1205 13:39:02.000686 4784 scope.go:117] "RemoveContainer" containerID="17364ba8ca6f9c9e7dab8082eb826a2beef6b231c3f26dfd80166ed3425508a1" Dec 05 13:39:02 crc kubenswrapper[4784]: E1205 13:39:02.001455 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:39:13 crc kubenswrapper[4784]: I1205 13:39:13.999221 4784 scope.go:117] "RemoveContainer" containerID="17364ba8ca6f9c9e7dab8082eb826a2beef6b231c3f26dfd80166ed3425508a1" Dec 05 13:39:14 crc kubenswrapper[4784]: E1205 13:39:14.000097 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:39:25 crc kubenswrapper[4784]: I1205 13:39:25.000254 4784 scope.go:117] "RemoveContainer" containerID="17364ba8ca6f9c9e7dab8082eb826a2beef6b231c3f26dfd80166ed3425508a1" Dec 05 13:39:25 crc kubenswrapper[4784]: E1205 13:39:25.001613 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:39:39 crc kubenswrapper[4784]: I1205 13:39:38.999434 4784 scope.go:117] "RemoveContainer" containerID="17364ba8ca6f9c9e7dab8082eb826a2beef6b231c3f26dfd80166ed3425508a1" Dec 05 13:39:39 crc kubenswrapper[4784]: E1205 13:39:39.000125 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 13:39:51 crc kubenswrapper[4784]: I1205 13:39:51.998493 4784 scope.go:117] "RemoveContainer" containerID="17364ba8ca6f9c9e7dab8082eb826a2beef6b231c3f26dfd80166ed3425508a1"
Dec 05 13:39:52 crc kubenswrapper[4784]: E1205 13:39:52.000411 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 13:40:04 crc kubenswrapper[4784]: I1205 13:40:03.999653 4784 scope.go:117] "RemoveContainer" containerID="17364ba8ca6f9c9e7dab8082eb826a2beef6b231c3f26dfd80166ed3425508a1"
Dec 05 13:40:04 crc kubenswrapper[4784]: E1205 13:40:04.000885 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 13:40:16 crc kubenswrapper[4784]: I1205 13:40:15.999228 4784 scope.go:117] "RemoveContainer" containerID="17364ba8ca6f9c9e7dab8082eb826a2beef6b231c3f26dfd80166ed3425508a1"
Dec 05 13:40:16 crc kubenswrapper[4784]: E1205 13:40:16.000237 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 13:40:31 crc kubenswrapper[4784]: I1205 13:40:31.042060 4784 scope.go:117] "RemoveContainer" containerID="17364ba8ca6f9c9e7dab8082eb826a2beef6b231c3f26dfd80166ed3425508a1"
Dec 05 13:40:31 crc kubenswrapper[4784]: E1205 13:40:31.044707 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 13:40:42 crc kubenswrapper[4784]: I1205 13:40:41.999451 4784 scope.go:117] "RemoveContainer" containerID="17364ba8ca6f9c9e7dab8082eb826a2beef6b231c3f26dfd80166ed3425508a1"
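From 13:37 onward every sync attempt for machine-config-daemon is skipped with "back-off 5m0s": the restart backoff has already hit its cap, so the kubelet declines to start the container until the window elapses. A sketch of the doubling backoff this message implies (the 10s base and 5m cap are the kubelet's defaults, stated here as an assumption for illustration; the real implementation tracks a per-container backoff key rather than a crash count):

package main

import (
	"fmt"
	"time"
)

// restartDelay models the doubling restart backoff behind CrashLoopBackOff:
// each crash doubles the wait, capped at five minutes.
func restartDelay(crashes int) time.Duration {
	const (
		base     = 10 * time.Second
		maxDelay = 5 * time.Minute
	)
	d := base
	for i := 1; i < crashes; i++ {
		d *= 2
		if d >= maxDelay {
			return maxDelay
		}
	}
	return d
}

func main() {
	for n := 1; n <= 7; n++ {
		fmt.Printf("after crash %d: wait %v\n", n, restartDelay(n))
	}
	// From the sixth crash on the wait stays at 5m0s, matching the
	// "back-off 5m0s" in the messages above.
}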
pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:40:53 crc kubenswrapper[4784]: I1205 13:40:53.376546 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-dtvvg"] Dec 05 13:40:53 crc kubenswrapper[4784]: E1205 13:40:53.377471 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7368ecae-ebc1-4e96-955b-f5c52c860955" containerName="registry-server" Dec 05 13:40:53 crc kubenswrapper[4784]: I1205 13:40:53.377485 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="7368ecae-ebc1-4e96-955b-f5c52c860955" containerName="registry-server" Dec 05 13:40:53 crc kubenswrapper[4784]: E1205 13:40:53.377496 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7368ecae-ebc1-4e96-955b-f5c52c860955" containerName="extract-utilities" Dec 05 13:40:53 crc kubenswrapper[4784]: I1205 13:40:53.377503 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="7368ecae-ebc1-4e96-955b-f5c52c860955" containerName="extract-utilities" Dec 05 13:40:53 crc kubenswrapper[4784]: E1205 13:40:53.377514 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7368ecae-ebc1-4e96-955b-f5c52c860955" containerName="extract-content" Dec 05 13:40:53 crc kubenswrapper[4784]: I1205 13:40:53.377522 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="7368ecae-ebc1-4e96-955b-f5c52c860955" containerName="extract-content" Dec 05 13:40:53 crc kubenswrapper[4784]: I1205 13:40:53.377724 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="7368ecae-ebc1-4e96-955b-f5c52c860955" containerName="registry-server" Dec 05 13:40:53 crc kubenswrapper[4784]: I1205 13:40:53.379131 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dtvvg" Dec 05 13:40:53 crc kubenswrapper[4784]: I1205 13:40:53.393524 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dtvvg"] Dec 05 13:40:53 crc kubenswrapper[4784]: I1205 13:40:53.480516 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d4mfr\" (UniqueName: \"kubernetes.io/projected/326882dd-b35c-4279-8bac-19cf8360bafd-kube-api-access-d4mfr\") pod \"community-operators-dtvvg\" (UID: \"326882dd-b35c-4279-8bac-19cf8360bafd\") " pod="openshift-marketplace/community-operators-dtvvg" Dec 05 13:40:53 crc kubenswrapper[4784]: I1205 13:40:53.480873 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/326882dd-b35c-4279-8bac-19cf8360bafd-utilities\") pod \"community-operators-dtvvg\" (UID: \"326882dd-b35c-4279-8bac-19cf8360bafd\") " pod="openshift-marketplace/community-operators-dtvvg" Dec 05 13:40:53 crc kubenswrapper[4784]: I1205 13:40:53.481046 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/326882dd-b35c-4279-8bac-19cf8360bafd-catalog-content\") pod \"community-operators-dtvvg\" (UID: \"326882dd-b35c-4279-8bac-19cf8360bafd\") " pod="openshift-marketplace/community-operators-dtvvg" Dec 05 13:40:53 crc kubenswrapper[4784]: I1205 13:40:53.582612 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/326882dd-b35c-4279-8bac-19cf8360bafd-utilities\") pod \"community-operators-dtvvg\" (UID: \"326882dd-b35c-4279-8bac-19cf8360bafd\") " pod="openshift-marketplace/community-operators-dtvvg" Dec 05 13:40:53 crc kubenswrapper[4784]: I1205 13:40:53.582740 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/326882dd-b35c-4279-8bac-19cf8360bafd-catalog-content\") pod \"community-operators-dtvvg\" (UID: \"326882dd-b35c-4279-8bac-19cf8360bafd\") " pod="openshift-marketplace/community-operators-dtvvg" Dec 05 13:40:53 crc kubenswrapper[4784]: I1205 13:40:53.582830 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d4mfr\" (UniqueName: \"kubernetes.io/projected/326882dd-b35c-4279-8bac-19cf8360bafd-kube-api-access-d4mfr\") pod \"community-operators-dtvvg\" (UID: \"326882dd-b35c-4279-8bac-19cf8360bafd\") " pod="openshift-marketplace/community-operators-dtvvg" Dec 05 13:40:53 crc kubenswrapper[4784]: I1205 13:40:53.583171 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/326882dd-b35c-4279-8bac-19cf8360bafd-utilities\") pod \"community-operators-dtvvg\" (UID: \"326882dd-b35c-4279-8bac-19cf8360bafd\") " pod="openshift-marketplace/community-operators-dtvvg" Dec 05 13:40:53 crc kubenswrapper[4784]: I1205 13:40:53.583269 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/326882dd-b35c-4279-8bac-19cf8360bafd-catalog-content\") pod \"community-operators-dtvvg\" (UID: \"326882dd-b35c-4279-8bac-19cf8360bafd\") " pod="openshift-marketplace/community-operators-dtvvg" Dec 05 13:40:53 crc kubenswrapper[4784]: I1205 13:40:53.622241 4784 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-d4mfr\" (UniqueName: \"kubernetes.io/projected/326882dd-b35c-4279-8bac-19cf8360bafd-kube-api-access-d4mfr\") pod \"community-operators-dtvvg\" (UID: \"326882dd-b35c-4279-8bac-19cf8360bafd\") " pod="openshift-marketplace/community-operators-dtvvg" Dec 05 13:40:53 crc kubenswrapper[4784]: I1205 13:40:53.749136 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dtvvg" Dec 05 13:40:54 crc kubenswrapper[4784]: I1205 13:40:53.999795 4784 scope.go:117] "RemoveContainer" containerID="17364ba8ca6f9c9e7dab8082eb826a2beef6b231c3f26dfd80166ed3425508a1" Dec 05 13:40:54 crc kubenswrapper[4784]: E1205 13:40:54.000241 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:40:54 crc kubenswrapper[4784]: I1205 13:40:54.336820 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dtvvg"] Dec 05 13:40:54 crc kubenswrapper[4784]: I1205 13:40:54.902651 4784 generic.go:334] "Generic (PLEG): container finished" podID="326882dd-b35c-4279-8bac-19cf8360bafd" containerID="1d14384b6341ce3836224c93f8e4437462f996ed53311d0264b582d38065ca7e" exitCode=0 Dec 05 13:40:54 crc kubenswrapper[4784]: I1205 13:40:54.902796 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dtvvg" event={"ID":"326882dd-b35c-4279-8bac-19cf8360bafd","Type":"ContainerDied","Data":"1d14384b6341ce3836224c93f8e4437462f996ed53311d0264b582d38065ca7e"} Dec 05 13:40:54 crc kubenswrapper[4784]: I1205 13:40:54.902959 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dtvvg" event={"ID":"326882dd-b35c-4279-8bac-19cf8360bafd","Type":"ContainerStarted","Data":"5def5b2264edc82dd1aa2ff231ef65f80ed2ff1470fab6389896c24acff01aef"} Dec 05 13:40:55 crc kubenswrapper[4784]: I1205 13:40:55.916399 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dtvvg" event={"ID":"326882dd-b35c-4279-8bac-19cf8360bafd","Type":"ContainerStarted","Data":"6ba117b05ed25864cfdbfb44bf8b672209cc0a617585027c8293e85674e07a30"} Dec 05 13:40:56 crc kubenswrapper[4784]: I1205 13:40:56.927018 4784 generic.go:334] "Generic (PLEG): container finished" podID="326882dd-b35c-4279-8bac-19cf8360bafd" containerID="6ba117b05ed25864cfdbfb44bf8b672209cc0a617585027c8293e85674e07a30" exitCode=0 Dec 05 13:40:56 crc kubenswrapper[4784]: I1205 13:40:56.927112 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dtvvg" event={"ID":"326882dd-b35c-4279-8bac-19cf8360bafd","Type":"ContainerDied","Data":"6ba117b05ed25864cfdbfb44bf8b672209cc0a617585027c8293e85674e07a30"} Dec 05 13:40:57 crc kubenswrapper[4784]: I1205 13:40:57.936157 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dtvvg" event={"ID":"326882dd-b35c-4279-8bac-19cf8360bafd","Type":"ContainerStarted","Data":"e4b6f3ac39727d72c3d6a3022899407ad1bdbb8eb415cf5bd459692e6c574eb6"} Dec 05 13:40:57 crc kubenswrapper[4784]: I1205 13:40:57.958848 4784 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-dtvvg" podStartSLOduration=2.542779887 podStartE2EDuration="4.958826116s" podCreationTimestamp="2025-12-05 13:40:53 +0000 UTC" firstStartedPulling="2025-12-05 13:40:54.90492472 +0000 UTC m=+4534.324991535" lastFinishedPulling="2025-12-05 13:40:57.320970939 +0000 UTC m=+4536.741037764" observedRunningTime="2025-12-05 13:40:57.952576031 +0000 UTC m=+4537.372642846" watchObservedRunningTime="2025-12-05 13:40:57.958826116 +0000 UTC m=+4537.378892961"
Dec 05 13:40:59 crc kubenswrapper[4784]: I1205 13:40:59.771089 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-s9s4b"]
Dec 05 13:40:59 crc kubenswrapper[4784]: I1205 13:40:59.773694 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s9s4b"
Dec 05 13:40:59 crc kubenswrapper[4784]: I1205 13:40:59.787077 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-s9s4b"]
Dec 05 13:40:59 crc kubenswrapper[4784]: I1205 13:40:59.911912 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b8793f5-a46c-47aa-a464-099071c0d4e5-utilities\") pod \"certified-operators-s9s4b\" (UID: \"9b8793f5-a46c-47aa-a464-099071c0d4e5\") " pod="openshift-marketplace/certified-operators-s9s4b"
Dec 05 13:40:59 crc kubenswrapper[4784]: I1205 13:40:59.911991 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b8793f5-a46c-47aa-a464-099071c0d4e5-catalog-content\") pod \"certified-operators-s9s4b\" (UID: \"9b8793f5-a46c-47aa-a464-099071c0d4e5\") " pod="openshift-marketplace/certified-operators-s9s4b"
Dec 05 13:40:59 crc kubenswrapper[4784]: I1205 13:40:59.912038 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bpkzg\" (UniqueName: \"kubernetes.io/projected/9b8793f5-a46c-47aa-a464-099071c0d4e5-kube-api-access-bpkzg\") pod \"certified-operators-s9s4b\" (UID: \"9b8793f5-a46c-47aa-a464-099071c0d4e5\") " pod="openshift-marketplace/certified-operators-s9s4b"
Dec 05 13:41:00 crc kubenswrapper[4784]: I1205 13:41:00.013777 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b8793f5-a46c-47aa-a464-099071c0d4e5-utilities\") pod \"certified-operators-s9s4b\" (UID: \"9b8793f5-a46c-47aa-a464-099071c0d4e5\") " pod="openshift-marketplace/certified-operators-s9s4b"
Dec 05 13:41:00 crc kubenswrapper[4784]: I1205 13:41:00.013851 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b8793f5-a46c-47aa-a464-099071c0d4e5-catalog-content\") pod \"certified-operators-s9s4b\" (UID: \"9b8793f5-a46c-47aa-a464-099071c0d4e5\") " pod="openshift-marketplace/certified-operators-s9s4b"
Dec 05 13:41:00 crc kubenswrapper[4784]: I1205 13:41:00.013878 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bpkzg\" (UniqueName: \"kubernetes.io/projected/9b8793f5-a46c-47aa-a464-099071c0d4e5-kube-api-access-bpkzg\") pod \"certified-operators-s9s4b\" (UID: \"9b8793f5-a46c-47aa-a464-099071c0d4e5\") " pod="openshift-marketplace/certified-operators-s9s4b"
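The startup-latency record for community-operators-dtvvg above decomposes cleanly: the E2E duration runs from podCreationTimestamp to watchObservedRunningTime, and the SLO duration is the E2E duration minus the image-pull window (lastFinishedPulling - firstStartedPulling). Recomputing from the logged timestamps reproduces the figures to within rounding; a self-contained check:

package main

import (
	"fmt"
	"time"
)

// mustParse reads timestamps in the format the tracker logs them.
func mustParse(s string) time.Time {
	t, err := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST", s)
	if err != nil {
		panic(err)
	}
	return t
}

func main() {
	created := mustParse("2025-12-05 13:40:53 +0000 UTC")
	firstPull := mustParse("2025-12-05 13:40:54.90492472 +0000 UTC")
	lastPull := mustParse("2025-12-05 13:40:57.320970939 +0000 UTC")
	watchObserved := mustParse("2025-12-05 13:40:57.958826116 +0000 UTC")

	e2e := watchObserved.Sub(created)    // 4.958826116s, as logged
	slo := e2e - lastPull.Sub(firstPull) // ~2.5427799s vs logged 2.542779887
	fmt.Println("podStartE2EDuration:", e2e)
	fmt.Println("podStartSLOduration:", slo)
}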
Dec 05 13:41:00 crc kubenswrapper[4784]: I1205 13:41:00.014429 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b8793f5-a46c-47aa-a464-099071c0d4e5-catalog-content\") pod \"certified-operators-s9s4b\" (UID: \"9b8793f5-a46c-47aa-a464-099071c0d4e5\") " pod="openshift-marketplace/certified-operators-s9s4b"
Dec 05 13:41:00 crc kubenswrapper[4784]: I1205 13:41:00.014542 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b8793f5-a46c-47aa-a464-099071c0d4e5-utilities\") pod \"certified-operators-s9s4b\" (UID: \"9b8793f5-a46c-47aa-a464-099071c0d4e5\") " pod="openshift-marketplace/certified-operators-s9s4b"
Dec 05 13:41:00 crc kubenswrapper[4784]: I1205 13:41:00.037652 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bpkzg\" (UniqueName: \"kubernetes.io/projected/9b8793f5-a46c-47aa-a464-099071c0d4e5-kube-api-access-bpkzg\") pod \"certified-operators-s9s4b\" (UID: \"9b8793f5-a46c-47aa-a464-099071c0d4e5\") " pod="openshift-marketplace/certified-operators-s9s4b"
Dec 05 13:41:00 crc kubenswrapper[4784]: I1205 13:41:00.099661 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s9s4b"
Dec 05 13:41:00 crc kubenswrapper[4784]: I1205 13:41:00.694542 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-s9s4b"]
Dec 05 13:41:00 crc kubenswrapper[4784]: I1205 13:41:00.970451 4784 generic.go:334] "Generic (PLEG): container finished" podID="9b8793f5-a46c-47aa-a464-099071c0d4e5" containerID="567e26456ebb3fd326131d5a72f7c0630ff9026e914962ccc1029b0a83cb1d01" exitCode=0
Dec 05 13:41:00 crc kubenswrapper[4784]: I1205 13:41:00.970502 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s9s4b" event={"ID":"9b8793f5-a46c-47aa-a464-099071c0d4e5","Type":"ContainerDied","Data":"567e26456ebb3fd326131d5a72f7c0630ff9026e914962ccc1029b0a83cb1d01"}
Dec 05 13:41:00 crc kubenswrapper[4784]: I1205 13:41:00.970534 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s9s4b" event={"ID":"9b8793f5-a46c-47aa-a464-099071c0d4e5","Type":"ContainerStarted","Data":"0fed5e545324f90d9b4211918562997793deff1eb35b1cada1c869bfcd74be8e"}
Dec 05 13:41:01 crc kubenswrapper[4784]: I1205 13:41:01.981289 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s9s4b" event={"ID":"9b8793f5-a46c-47aa-a464-099071c0d4e5","Type":"ContainerStarted","Data":"02c0a3751bee2d0cd1a26a92482d32df6c20f0309ef78b8bebce45d4eaf9513b"}
Dec 05 13:41:03 crc kubenswrapper[4784]: I1205 13:41:03.749674 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-dtvvg"
Dec 05 13:41:03 crc kubenswrapper[4784]: I1205 13:41:03.750370 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-dtvvg"
Dec 05 13:41:03 crc kubenswrapper[4784]: I1205 13:41:03.812080 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-dtvvg"
containerID="02c0a3751bee2d0cd1a26a92482d32df6c20f0309ef78b8bebce45d4eaf9513b" exitCode=0 Dec 05 13:41:04 crc kubenswrapper[4784]: I1205 13:41:04.003601 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s9s4b" event={"ID":"9b8793f5-a46c-47aa-a464-099071c0d4e5","Type":"ContainerDied","Data":"02c0a3751bee2d0cd1a26a92482d32df6c20f0309ef78b8bebce45d4eaf9513b"} Dec 05 13:41:04 crc kubenswrapper[4784]: I1205 13:41:04.062378 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-dtvvg" Dec 05 13:41:05 crc kubenswrapper[4784]: I1205 13:41:05.016626 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s9s4b" event={"ID":"9b8793f5-a46c-47aa-a464-099071c0d4e5","Type":"ContainerStarted","Data":"3427dc7a2d166ccdce0e4dd7e3262f79356ed0fb2ec110cbdff01a37bbecf202"} Dec 05 13:41:05 crc kubenswrapper[4784]: I1205 13:41:05.044681 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-s9s4b" podStartSLOduration=2.518517861 podStartE2EDuration="6.044654634s" podCreationTimestamp="2025-12-05 13:40:59 +0000 UTC" firstStartedPulling="2025-12-05 13:41:00.974701805 +0000 UTC m=+4540.394768640" lastFinishedPulling="2025-12-05 13:41:04.500838588 +0000 UTC m=+4543.920905413" observedRunningTime="2025-12-05 13:41:05.03329728 +0000 UTC m=+4544.453364105" watchObservedRunningTime="2025-12-05 13:41:05.044654634 +0000 UTC m=+4544.464721469" Dec 05 13:41:05 crc kubenswrapper[4784]: I1205 13:41:05.762128 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dtvvg"] Dec 05 13:41:06 crc kubenswrapper[4784]: I1205 13:41:06.035550 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-dtvvg" podUID="326882dd-b35c-4279-8bac-19cf8360bafd" containerName="registry-server" containerID="cri-o://e4b6f3ac39727d72c3d6a3022899407ad1bdbb8eb415cf5bd459692e6c574eb6" gracePeriod=2 Dec 05 13:41:06 crc kubenswrapper[4784]: I1205 13:41:06.552302 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dtvvg" Dec 05 13:41:06 crc kubenswrapper[4784]: I1205 13:41:06.663956 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4mfr\" (UniqueName: \"kubernetes.io/projected/326882dd-b35c-4279-8bac-19cf8360bafd-kube-api-access-d4mfr\") pod \"326882dd-b35c-4279-8bac-19cf8360bafd\" (UID: \"326882dd-b35c-4279-8bac-19cf8360bafd\") " Dec 05 13:41:06 crc kubenswrapper[4784]: I1205 13:41:06.664389 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/326882dd-b35c-4279-8bac-19cf8360bafd-catalog-content\") pod \"326882dd-b35c-4279-8bac-19cf8360bafd\" (UID: \"326882dd-b35c-4279-8bac-19cf8360bafd\") " Dec 05 13:41:06 crc kubenswrapper[4784]: I1205 13:41:06.664439 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/326882dd-b35c-4279-8bac-19cf8360bafd-utilities\") pod \"326882dd-b35c-4279-8bac-19cf8360bafd\" (UID: \"326882dd-b35c-4279-8bac-19cf8360bafd\") " Dec 05 13:41:06 crc kubenswrapper[4784]: I1205 13:41:06.665848 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/326882dd-b35c-4279-8bac-19cf8360bafd-utilities" (OuterVolumeSpecName: "utilities") pod "326882dd-b35c-4279-8bac-19cf8360bafd" (UID: "326882dd-b35c-4279-8bac-19cf8360bafd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:41:06 crc kubenswrapper[4784]: I1205 13:41:06.706332 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/326882dd-b35c-4279-8bac-19cf8360bafd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "326882dd-b35c-4279-8bac-19cf8360bafd" (UID: "326882dd-b35c-4279-8bac-19cf8360bafd"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:41:06 crc kubenswrapper[4784]: I1205 13:41:06.767309 4784 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/326882dd-b35c-4279-8bac-19cf8360bafd-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 13:41:06 crc kubenswrapper[4784]: I1205 13:41:06.767348 4784 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/326882dd-b35c-4279-8bac-19cf8360bafd-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 13:41:07 crc kubenswrapper[4784]: I1205 13:41:07.045543 4784 generic.go:334] "Generic (PLEG): container finished" podID="326882dd-b35c-4279-8bac-19cf8360bafd" containerID="e4b6f3ac39727d72c3d6a3022899407ad1bdbb8eb415cf5bd459692e6c574eb6" exitCode=0 Dec 05 13:41:07 crc kubenswrapper[4784]: I1205 13:41:07.045631 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dtvvg" event={"ID":"326882dd-b35c-4279-8bac-19cf8360bafd","Type":"ContainerDied","Data":"e4b6f3ac39727d72c3d6a3022899407ad1bdbb8eb415cf5bd459692e6c574eb6"} Dec 05 13:41:07 crc kubenswrapper[4784]: I1205 13:41:07.045686 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dtvvg" event={"ID":"326882dd-b35c-4279-8bac-19cf8360bafd","Type":"ContainerDied","Data":"5def5b2264edc82dd1aa2ff231ef65f80ed2ff1470fab6389896c24acff01aef"} Dec 05 13:41:07 crc kubenswrapper[4784]: I1205 13:41:07.045725 4784 scope.go:117] "RemoveContainer" containerID="e4b6f3ac39727d72c3d6a3022899407ad1bdbb8eb415cf5bd459692e6c574eb6" Dec 05 13:41:07 crc kubenswrapper[4784]: I1205 13:41:07.045985 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dtvvg" Dec 05 13:41:07 crc kubenswrapper[4784]: I1205 13:41:07.073565 4784 scope.go:117] "RemoveContainer" containerID="6ba117b05ed25864cfdbfb44bf8b672209cc0a617585027c8293e85674e07a30" Dec 05 13:41:07 crc kubenswrapper[4784]: I1205 13:41:07.091950 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/326882dd-b35c-4279-8bac-19cf8360bafd-kube-api-access-d4mfr" (OuterVolumeSpecName: "kube-api-access-d4mfr") pod "326882dd-b35c-4279-8bac-19cf8360bafd" (UID: "326882dd-b35c-4279-8bac-19cf8360bafd"). InnerVolumeSpecName "kube-api-access-d4mfr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:41:07 crc kubenswrapper[4784]: I1205 13:41:07.109811 4784 scope.go:117] "RemoveContainer" containerID="1d14384b6341ce3836224c93f8e4437462f996ed53311d0264b582d38065ca7e" Dec 05 13:41:07 crc kubenswrapper[4784]: I1205 13:41:07.176172 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4mfr\" (UniqueName: \"kubernetes.io/projected/326882dd-b35c-4279-8bac-19cf8360bafd-kube-api-access-d4mfr\") on node \"crc\" DevicePath \"\"" Dec 05 13:41:07 crc kubenswrapper[4784]: I1205 13:41:07.205036 4784 scope.go:117] "RemoveContainer" containerID="e4b6f3ac39727d72c3d6a3022899407ad1bdbb8eb415cf5bd459692e6c574eb6" Dec 05 13:41:07 crc kubenswrapper[4784]: E1205 13:41:07.208087 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e4b6f3ac39727d72c3d6a3022899407ad1bdbb8eb415cf5bd459692e6c574eb6\": container with ID starting with e4b6f3ac39727d72c3d6a3022899407ad1bdbb8eb415cf5bd459692e6c574eb6 not found: ID does not exist" containerID="e4b6f3ac39727d72c3d6a3022899407ad1bdbb8eb415cf5bd459692e6c574eb6" Dec 05 13:41:07 crc kubenswrapper[4784]: I1205 13:41:07.208142 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4b6f3ac39727d72c3d6a3022899407ad1bdbb8eb415cf5bd459692e6c574eb6"} err="failed to get container status \"e4b6f3ac39727d72c3d6a3022899407ad1bdbb8eb415cf5bd459692e6c574eb6\": rpc error: code = NotFound desc = could not find container \"e4b6f3ac39727d72c3d6a3022899407ad1bdbb8eb415cf5bd459692e6c574eb6\": container with ID starting with e4b6f3ac39727d72c3d6a3022899407ad1bdbb8eb415cf5bd459692e6c574eb6 not found: ID does not exist" Dec 05 13:41:07 crc kubenswrapper[4784]: I1205 13:41:07.208169 4784 scope.go:117] "RemoveContainer" containerID="6ba117b05ed25864cfdbfb44bf8b672209cc0a617585027c8293e85674e07a30" Dec 05 13:41:07 crc kubenswrapper[4784]: E1205 13:41:07.208752 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6ba117b05ed25864cfdbfb44bf8b672209cc0a617585027c8293e85674e07a30\": container with ID starting with 6ba117b05ed25864cfdbfb44bf8b672209cc0a617585027c8293e85674e07a30 not found: ID does not exist" containerID="6ba117b05ed25864cfdbfb44bf8b672209cc0a617585027c8293e85674e07a30" Dec 05 13:41:07 crc kubenswrapper[4784]: I1205 13:41:07.208769 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ba117b05ed25864cfdbfb44bf8b672209cc0a617585027c8293e85674e07a30"} err="failed to get container status \"6ba117b05ed25864cfdbfb44bf8b672209cc0a617585027c8293e85674e07a30\": rpc error: code = NotFound desc = could not find container \"6ba117b05ed25864cfdbfb44bf8b672209cc0a617585027c8293e85674e07a30\": container with ID starting with 6ba117b05ed25864cfdbfb44bf8b672209cc0a617585027c8293e85674e07a30 not found: ID does not exist" Dec 05 13:41:07 crc kubenswrapper[4784]: I1205 13:41:07.208786 4784 scope.go:117] "RemoveContainer" containerID="1d14384b6341ce3836224c93f8e4437462f996ed53311d0264b582d38065ca7e" Dec 05 13:41:07 crc kubenswrapper[4784]: E1205 13:41:07.208977 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d14384b6341ce3836224c93f8e4437462f996ed53311d0264b582d38065ca7e\": container with ID starting with 1d14384b6341ce3836224c93f8e4437462f996ed53311d0264b582d38065ca7e not found: ID does not 
exist" containerID="1d14384b6341ce3836224c93f8e4437462f996ed53311d0264b582d38065ca7e" Dec 05 13:41:07 crc kubenswrapper[4784]: I1205 13:41:07.208994 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d14384b6341ce3836224c93f8e4437462f996ed53311d0264b582d38065ca7e"} err="failed to get container status \"1d14384b6341ce3836224c93f8e4437462f996ed53311d0264b582d38065ca7e\": rpc error: code = NotFound desc = could not find container \"1d14384b6341ce3836224c93f8e4437462f996ed53311d0264b582d38065ca7e\": container with ID starting with 1d14384b6341ce3836224c93f8e4437462f996ed53311d0264b582d38065ca7e not found: ID does not exist" Dec 05 13:41:07 crc kubenswrapper[4784]: I1205 13:41:07.380322 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dtvvg"] Dec 05 13:41:07 crc kubenswrapper[4784]: I1205 13:41:07.389175 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-dtvvg"] Dec 05 13:41:08 crc kubenswrapper[4784]: I1205 13:41:08.999408 4784 scope.go:117] "RemoveContainer" containerID="17364ba8ca6f9c9e7dab8082eb826a2beef6b231c3f26dfd80166ed3425508a1" Dec 05 13:41:09 crc kubenswrapper[4784]: E1205 13:41:08.999822 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:41:09 crc kubenswrapper[4784]: I1205 13:41:09.011213 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="326882dd-b35c-4279-8bac-19cf8360bafd" path="/var/lib/kubelet/pods/326882dd-b35c-4279-8bac-19cf8360bafd/volumes" Dec 05 13:41:10 crc kubenswrapper[4784]: I1205 13:41:10.100065 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-s9s4b" Dec 05 13:41:10 crc kubenswrapper[4784]: I1205 13:41:10.100396 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-s9s4b" Dec 05 13:41:10 crc kubenswrapper[4784]: I1205 13:41:10.150277 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-s9s4b" Dec 05 13:41:11 crc kubenswrapper[4784]: I1205 13:41:11.150742 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-s9s4b" Dec 05 13:41:11 crc kubenswrapper[4784]: I1205 13:41:11.767072 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-s9s4b"] Dec 05 13:41:13 crc kubenswrapper[4784]: I1205 13:41:13.115226 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-s9s4b" podUID="9b8793f5-a46c-47aa-a464-099071c0d4e5" containerName="registry-server" containerID="cri-o://3427dc7a2d166ccdce0e4dd7e3262f79356ed0fb2ec110cbdff01a37bbecf202" gracePeriod=2 Dec 05 13:41:13 crc kubenswrapper[4784]: I1205 13:41:13.682423 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-s9s4b" Dec 05 13:41:13 crc kubenswrapper[4784]: I1205 13:41:13.719326 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b8793f5-a46c-47aa-a464-099071c0d4e5-catalog-content\") pod \"9b8793f5-a46c-47aa-a464-099071c0d4e5\" (UID: \"9b8793f5-a46c-47aa-a464-099071c0d4e5\") " Dec 05 13:41:13 crc kubenswrapper[4784]: I1205 13:41:13.719538 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bpkzg\" (UniqueName: \"kubernetes.io/projected/9b8793f5-a46c-47aa-a464-099071c0d4e5-kube-api-access-bpkzg\") pod \"9b8793f5-a46c-47aa-a464-099071c0d4e5\" (UID: \"9b8793f5-a46c-47aa-a464-099071c0d4e5\") " Dec 05 13:41:13 crc kubenswrapper[4784]: I1205 13:41:13.719564 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b8793f5-a46c-47aa-a464-099071c0d4e5-utilities\") pod \"9b8793f5-a46c-47aa-a464-099071c0d4e5\" (UID: \"9b8793f5-a46c-47aa-a464-099071c0d4e5\") " Dec 05 13:41:13 crc kubenswrapper[4784]: I1205 13:41:13.721105 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9b8793f5-a46c-47aa-a464-099071c0d4e5-utilities" (OuterVolumeSpecName: "utilities") pod "9b8793f5-a46c-47aa-a464-099071c0d4e5" (UID: "9b8793f5-a46c-47aa-a464-099071c0d4e5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:41:13 crc kubenswrapper[4784]: I1205 13:41:13.728288 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b8793f5-a46c-47aa-a464-099071c0d4e5-kube-api-access-bpkzg" (OuterVolumeSpecName: "kube-api-access-bpkzg") pod "9b8793f5-a46c-47aa-a464-099071c0d4e5" (UID: "9b8793f5-a46c-47aa-a464-099071c0d4e5"). InnerVolumeSpecName "kube-api-access-bpkzg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:41:13 crc kubenswrapper[4784]: I1205 13:41:13.783661 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9b8793f5-a46c-47aa-a464-099071c0d4e5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9b8793f5-a46c-47aa-a464-099071c0d4e5" (UID: "9b8793f5-a46c-47aa-a464-099071c0d4e5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:41:13 crc kubenswrapper[4784]: I1205 13:41:13.822514 4784 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b8793f5-a46c-47aa-a464-099071c0d4e5-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 13:41:13 crc kubenswrapper[4784]: I1205 13:41:13.822739 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bpkzg\" (UniqueName: \"kubernetes.io/projected/9b8793f5-a46c-47aa-a464-099071c0d4e5-kube-api-access-bpkzg\") on node \"crc\" DevicePath \"\"" Dec 05 13:41:13 crc kubenswrapper[4784]: I1205 13:41:13.822841 4784 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b8793f5-a46c-47aa-a464-099071c0d4e5-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 13:41:14 crc kubenswrapper[4784]: I1205 13:41:14.138094 4784 generic.go:334] "Generic (PLEG): container finished" podID="9b8793f5-a46c-47aa-a464-099071c0d4e5" containerID="3427dc7a2d166ccdce0e4dd7e3262f79356ed0fb2ec110cbdff01a37bbecf202" exitCode=0 Dec 05 13:41:14 crc kubenswrapper[4784]: I1205 13:41:14.138160 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s9s4b" event={"ID":"9b8793f5-a46c-47aa-a464-099071c0d4e5","Type":"ContainerDied","Data":"3427dc7a2d166ccdce0e4dd7e3262f79356ed0fb2ec110cbdff01a37bbecf202"} Dec 05 13:41:14 crc kubenswrapper[4784]: I1205 13:41:14.138239 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s9s4b" Dec 05 13:41:14 crc kubenswrapper[4784]: I1205 13:41:14.138248 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s9s4b" event={"ID":"9b8793f5-a46c-47aa-a464-099071c0d4e5","Type":"ContainerDied","Data":"0fed5e545324f90d9b4211918562997793deff1eb35b1cada1c869bfcd74be8e"} Dec 05 13:41:14 crc kubenswrapper[4784]: I1205 13:41:14.138270 4784 scope.go:117] "RemoveContainer" containerID="3427dc7a2d166ccdce0e4dd7e3262f79356ed0fb2ec110cbdff01a37bbecf202" Dec 05 13:41:14 crc kubenswrapper[4784]: I1205 13:41:14.179076 4784 scope.go:117] "RemoveContainer" containerID="02c0a3751bee2d0cd1a26a92482d32df6c20f0309ef78b8bebce45d4eaf9513b" Dec 05 13:41:14 crc kubenswrapper[4784]: I1205 13:41:14.184026 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-s9s4b"] Dec 05 13:41:14 crc kubenswrapper[4784]: I1205 13:41:14.193612 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-s9s4b"] Dec 05 13:41:14 crc kubenswrapper[4784]: I1205 13:41:14.205149 4784 scope.go:117] "RemoveContainer" containerID="567e26456ebb3fd326131d5a72f7c0630ff9026e914962ccc1029b0a83cb1d01" Dec 05 13:41:14 crc kubenswrapper[4784]: I1205 13:41:14.277926 4784 scope.go:117] "RemoveContainer" containerID="3427dc7a2d166ccdce0e4dd7e3262f79356ed0fb2ec110cbdff01a37bbecf202" Dec 05 13:41:14 crc kubenswrapper[4784]: E1205 13:41:14.278398 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3427dc7a2d166ccdce0e4dd7e3262f79356ed0fb2ec110cbdff01a37bbecf202\": container with ID starting with 3427dc7a2d166ccdce0e4dd7e3262f79356ed0fb2ec110cbdff01a37bbecf202 not found: ID does not exist" containerID="3427dc7a2d166ccdce0e4dd7e3262f79356ed0fb2ec110cbdff01a37bbecf202" Dec 05 13:41:14 crc kubenswrapper[4784]: I1205 13:41:14.278439 
4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3427dc7a2d166ccdce0e4dd7e3262f79356ed0fb2ec110cbdff01a37bbecf202"} err="failed to get container status \"3427dc7a2d166ccdce0e4dd7e3262f79356ed0fb2ec110cbdff01a37bbecf202\": rpc error: code = NotFound desc = could not find container \"3427dc7a2d166ccdce0e4dd7e3262f79356ed0fb2ec110cbdff01a37bbecf202\": container with ID starting with 3427dc7a2d166ccdce0e4dd7e3262f79356ed0fb2ec110cbdff01a37bbecf202 not found: ID does not exist" Dec 05 13:41:14 crc kubenswrapper[4784]: I1205 13:41:14.278466 4784 scope.go:117] "RemoveContainer" containerID="02c0a3751bee2d0cd1a26a92482d32df6c20f0309ef78b8bebce45d4eaf9513b" Dec 05 13:41:14 crc kubenswrapper[4784]: E1205 13:41:14.278870 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"02c0a3751bee2d0cd1a26a92482d32df6c20f0309ef78b8bebce45d4eaf9513b\": container with ID starting with 02c0a3751bee2d0cd1a26a92482d32df6c20f0309ef78b8bebce45d4eaf9513b not found: ID does not exist" containerID="02c0a3751bee2d0cd1a26a92482d32df6c20f0309ef78b8bebce45d4eaf9513b" Dec 05 13:41:14 crc kubenswrapper[4784]: I1205 13:41:14.278905 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"02c0a3751bee2d0cd1a26a92482d32df6c20f0309ef78b8bebce45d4eaf9513b"} err="failed to get container status \"02c0a3751bee2d0cd1a26a92482d32df6c20f0309ef78b8bebce45d4eaf9513b\": rpc error: code = NotFound desc = could not find container \"02c0a3751bee2d0cd1a26a92482d32df6c20f0309ef78b8bebce45d4eaf9513b\": container with ID starting with 02c0a3751bee2d0cd1a26a92482d32df6c20f0309ef78b8bebce45d4eaf9513b not found: ID does not exist" Dec 05 13:41:14 crc kubenswrapper[4784]: I1205 13:41:14.278928 4784 scope.go:117] "RemoveContainer" containerID="567e26456ebb3fd326131d5a72f7c0630ff9026e914962ccc1029b0a83cb1d01" Dec 05 13:41:14 crc kubenswrapper[4784]: E1205 13:41:14.279358 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"567e26456ebb3fd326131d5a72f7c0630ff9026e914962ccc1029b0a83cb1d01\": container with ID starting with 567e26456ebb3fd326131d5a72f7c0630ff9026e914962ccc1029b0a83cb1d01 not found: ID does not exist" containerID="567e26456ebb3fd326131d5a72f7c0630ff9026e914962ccc1029b0a83cb1d01" Dec 05 13:41:14 crc kubenswrapper[4784]: I1205 13:41:14.279383 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"567e26456ebb3fd326131d5a72f7c0630ff9026e914962ccc1029b0a83cb1d01"} err="failed to get container status \"567e26456ebb3fd326131d5a72f7c0630ff9026e914962ccc1029b0a83cb1d01\": rpc error: code = NotFound desc = could not find container \"567e26456ebb3fd326131d5a72f7c0630ff9026e914962ccc1029b0a83cb1d01\": container with ID starting with 567e26456ebb3fd326131d5a72f7c0630ff9026e914962ccc1029b0a83cb1d01 not found: ID does not exist" Dec 05 13:41:14 crc kubenswrapper[4784]: E1205 13:41:14.360321 4784 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9b8793f5_a46c_47aa_a464_099071c0d4e5.slice/crio-0fed5e545324f90d9b4211918562997793deff1eb35b1cada1c869bfcd74be8e\": RecentStats: unable to find data in memory cache]" Dec 05 13:41:15 crc kubenswrapper[4784]: I1205 13:41:15.013743 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="9b8793f5-a46c-47aa-a464-099071c0d4e5" path="/var/lib/kubelet/pods/9b8793f5-a46c-47aa-a464-099071c0d4e5/volumes" Dec 05 13:41:24 crc kubenswrapper[4784]: I1205 13:41:23.999679 4784 scope.go:117] "RemoveContainer" containerID="17364ba8ca6f9c9e7dab8082eb826a2beef6b231c3f26dfd80166ed3425508a1" Dec 05 13:41:24 crc kubenswrapper[4784]: E1205 13:41:24.000728 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:41:34 crc kubenswrapper[4784]: I1205 13:41:34.999160 4784 scope.go:117] "RemoveContainer" containerID="17364ba8ca6f9c9e7dab8082eb826a2beef6b231c3f26dfd80166ed3425508a1" Dec 05 13:41:35 crc kubenswrapper[4784]: E1205 13:41:35.000035 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:41:48 crc kubenswrapper[4784]: I1205 13:41:47.999793 4784 scope.go:117] "RemoveContainer" containerID="17364ba8ca6f9c9e7dab8082eb826a2beef6b231c3f26dfd80166ed3425508a1" Dec 05 13:41:48 crc kubenswrapper[4784]: E1205 13:41:48.000670 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:42:01 crc kubenswrapper[4784]: I1205 13:42:01.012094 4784 scope.go:117] "RemoveContainer" containerID="17364ba8ca6f9c9e7dab8082eb826a2beef6b231c3f26dfd80166ed3425508a1" Dec 05 13:42:01 crc kubenswrapper[4784]: I1205 13:42:01.646846 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerStarted","Data":"8906b04b01f65ffa3ebdfad47753606541d381ba8d87566cc51aba9d2d2a14a4"} Dec 05 13:42:14 crc kubenswrapper[4784]: I1205 13:42:14.790797 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/openstack-cell1-galera-0" podUID="62eaeb31-76a0-4f2b-9bbe-b00f25a620e3" containerName="galera" probeResult="failure" output="command timed out" Dec 05 13:43:14 crc kubenswrapper[4784]: I1205 13:43:14.982155 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-bms4w"] Dec 05 13:43:14 crc kubenswrapper[4784]: E1205 13:43:14.983465 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="326882dd-b35c-4279-8bac-19cf8360bafd" containerName="extract-content" Dec 05 13:43:14 crc kubenswrapper[4784]: I1205 13:43:14.983487 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="326882dd-b35c-4279-8bac-19cf8360bafd" containerName="extract-content" Dec 05 13:43:14 crc kubenswrapper[4784]: E1205 
13:43:14.983516 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b8793f5-a46c-47aa-a464-099071c0d4e5" containerName="registry-server" Dec 05 13:43:14 crc kubenswrapper[4784]: I1205 13:43:14.983528 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b8793f5-a46c-47aa-a464-099071c0d4e5" containerName="registry-server" Dec 05 13:43:14 crc kubenswrapper[4784]: E1205 13:43:14.983549 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="326882dd-b35c-4279-8bac-19cf8360bafd" containerName="registry-server" Dec 05 13:43:14 crc kubenswrapper[4784]: I1205 13:43:14.983563 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="326882dd-b35c-4279-8bac-19cf8360bafd" containerName="registry-server" Dec 05 13:43:14 crc kubenswrapper[4784]: E1205 13:43:14.983601 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="326882dd-b35c-4279-8bac-19cf8360bafd" containerName="extract-utilities" Dec 05 13:43:14 crc kubenswrapper[4784]: I1205 13:43:14.983614 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="326882dd-b35c-4279-8bac-19cf8360bafd" containerName="extract-utilities" Dec 05 13:43:14 crc kubenswrapper[4784]: E1205 13:43:14.983636 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b8793f5-a46c-47aa-a464-099071c0d4e5" containerName="extract-content" Dec 05 13:43:14 crc kubenswrapper[4784]: I1205 13:43:14.983649 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b8793f5-a46c-47aa-a464-099071c0d4e5" containerName="extract-content" Dec 05 13:43:14 crc kubenswrapper[4784]: E1205 13:43:14.983683 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b8793f5-a46c-47aa-a464-099071c0d4e5" containerName="extract-utilities" Dec 05 13:43:14 crc kubenswrapper[4784]: I1205 13:43:14.983695 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b8793f5-a46c-47aa-a464-099071c0d4e5" containerName="extract-utilities" Dec 05 13:43:14 crc kubenswrapper[4784]: I1205 13:43:14.984086 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="326882dd-b35c-4279-8bac-19cf8360bafd" containerName="registry-server" Dec 05 13:43:14 crc kubenswrapper[4784]: I1205 13:43:14.984159 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b8793f5-a46c-47aa-a464-099071c0d4e5" containerName="registry-server" Dec 05 13:43:14 crc kubenswrapper[4784]: I1205 13:43:14.993033 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bms4w" Dec 05 13:43:14 crc kubenswrapper[4784]: I1205 13:43:14.996806 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bms4w"] Dec 05 13:43:15 crc kubenswrapper[4784]: I1205 13:43:15.156066 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa3374dc-9ead-4950-a6df-38109144f5de-utilities\") pod \"redhat-marketplace-bms4w\" (UID: \"aa3374dc-9ead-4950-a6df-38109144f5de\") " pod="openshift-marketplace/redhat-marketplace-bms4w" Dec 05 13:43:15 crc kubenswrapper[4784]: I1205 13:43:15.156150 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fck6n\" (UniqueName: \"kubernetes.io/projected/aa3374dc-9ead-4950-a6df-38109144f5de-kube-api-access-fck6n\") pod \"redhat-marketplace-bms4w\" (UID: \"aa3374dc-9ead-4950-a6df-38109144f5de\") " pod="openshift-marketplace/redhat-marketplace-bms4w" Dec 05 13:43:15 crc kubenswrapper[4784]: I1205 13:43:15.156383 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa3374dc-9ead-4950-a6df-38109144f5de-catalog-content\") pod \"redhat-marketplace-bms4w\" (UID: \"aa3374dc-9ead-4950-a6df-38109144f5de\") " pod="openshift-marketplace/redhat-marketplace-bms4w" Dec 05 13:43:15 crc kubenswrapper[4784]: I1205 13:43:15.258307 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa3374dc-9ead-4950-a6df-38109144f5de-catalog-content\") pod \"redhat-marketplace-bms4w\" (UID: \"aa3374dc-9ead-4950-a6df-38109144f5de\") " pod="openshift-marketplace/redhat-marketplace-bms4w" Dec 05 13:43:15 crc kubenswrapper[4784]: I1205 13:43:15.258451 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa3374dc-9ead-4950-a6df-38109144f5de-utilities\") pod \"redhat-marketplace-bms4w\" (UID: \"aa3374dc-9ead-4950-a6df-38109144f5de\") " pod="openshift-marketplace/redhat-marketplace-bms4w" Dec 05 13:43:15 crc kubenswrapper[4784]: I1205 13:43:15.258487 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fck6n\" (UniqueName: \"kubernetes.io/projected/aa3374dc-9ead-4950-a6df-38109144f5de-kube-api-access-fck6n\") pod \"redhat-marketplace-bms4w\" (UID: \"aa3374dc-9ead-4950-a6df-38109144f5de\") " pod="openshift-marketplace/redhat-marketplace-bms4w" Dec 05 13:43:15 crc kubenswrapper[4784]: I1205 13:43:15.260059 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa3374dc-9ead-4950-a6df-38109144f5de-catalog-content\") pod \"redhat-marketplace-bms4w\" (UID: \"aa3374dc-9ead-4950-a6df-38109144f5de\") " pod="openshift-marketplace/redhat-marketplace-bms4w" Dec 05 13:43:15 crc kubenswrapper[4784]: I1205 13:43:15.260082 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa3374dc-9ead-4950-a6df-38109144f5de-utilities\") pod \"redhat-marketplace-bms4w\" (UID: \"aa3374dc-9ead-4950-a6df-38109144f5de\") " pod="openshift-marketplace/redhat-marketplace-bms4w" Dec 05 13:43:15 crc kubenswrapper[4784]: I1205 13:43:15.493100 4784 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-fck6n\" (UniqueName: \"kubernetes.io/projected/aa3374dc-9ead-4950-a6df-38109144f5de-kube-api-access-fck6n\") pod \"redhat-marketplace-bms4w\" (UID: \"aa3374dc-9ead-4950-a6df-38109144f5de\") " pod="openshift-marketplace/redhat-marketplace-bms4w" Dec 05 13:43:15 crc kubenswrapper[4784]: I1205 13:43:15.644862 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bms4w" Dec 05 13:43:16 crc kubenswrapper[4784]: I1205 13:43:16.131301 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bms4w"] Dec 05 13:43:16 crc kubenswrapper[4784]: W1205 13:43:16.134243 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaa3374dc_9ead_4950_a6df_38109144f5de.slice/crio-51a828deec5f04085cfaf3def7126b9d68d52ec01c872e05872a0e34d017186c WatchSource:0}: Error finding container 51a828deec5f04085cfaf3def7126b9d68d52ec01c872e05872a0e34d017186c: Status 404 returned error can't find the container with id 51a828deec5f04085cfaf3def7126b9d68d52ec01c872e05872a0e34d017186c Dec 05 13:43:16 crc kubenswrapper[4784]: I1205 13:43:16.495610 4784 generic.go:334] "Generic (PLEG): container finished" podID="aa3374dc-9ead-4950-a6df-38109144f5de" containerID="35612fc02818263e58086c2274a2abe7e2d30e16bc0566deb1aff237837bb959" exitCode=0 Dec 05 13:43:16 crc kubenswrapper[4784]: I1205 13:43:16.495665 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bms4w" event={"ID":"aa3374dc-9ead-4950-a6df-38109144f5de","Type":"ContainerDied","Data":"35612fc02818263e58086c2274a2abe7e2d30e16bc0566deb1aff237837bb959"} Dec 05 13:43:16 crc kubenswrapper[4784]: I1205 13:43:16.495729 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bms4w" event={"ID":"aa3374dc-9ead-4950-a6df-38109144f5de","Type":"ContainerStarted","Data":"51a828deec5f04085cfaf3def7126b9d68d52ec01c872e05872a0e34d017186c"} Dec 05 13:43:16 crc kubenswrapper[4784]: I1205 13:43:16.499315 4784 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 13:43:17 crc kubenswrapper[4784]: I1205 13:43:17.507360 4784 generic.go:334] "Generic (PLEG): container finished" podID="aa3374dc-9ead-4950-a6df-38109144f5de" containerID="86986b6e68b7bb0b354971dec68880e4918f9b889ae598ad05c7a26c3cb47593" exitCode=0 Dec 05 13:43:17 crc kubenswrapper[4784]: I1205 13:43:17.507570 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bms4w" event={"ID":"aa3374dc-9ead-4950-a6df-38109144f5de","Type":"ContainerDied","Data":"86986b6e68b7bb0b354971dec68880e4918f9b889ae598ad05c7a26c3cb47593"} Dec 05 13:43:18 crc kubenswrapper[4784]: I1205 13:43:18.521227 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bms4w" event={"ID":"aa3374dc-9ead-4950-a6df-38109144f5de","Type":"ContainerStarted","Data":"c365454215bda5d2b2d769612a60898c04b3f0abbcd59425a04508949c94614e"} Dec 05 13:43:18 crc kubenswrapper[4784]: I1205 13:43:18.543850 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-bms4w" podStartSLOduration=3.12637431 podStartE2EDuration="4.54382873s" podCreationTimestamp="2025-12-05 13:43:14 +0000 UTC" firstStartedPulling="2025-12-05 13:43:16.498829894 +0000 UTC m=+4675.918896749" 
lastFinishedPulling="2025-12-05 13:43:17.916284344 +0000 UTC m=+4677.336351169" observedRunningTime="2025-12-05 13:43:18.536618876 +0000 UTC m=+4677.956685711" watchObservedRunningTime="2025-12-05 13:43:18.54382873 +0000 UTC m=+4677.963895555" Dec 05 13:43:25 crc kubenswrapper[4784]: I1205 13:43:25.645556 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-bms4w" Dec 05 13:43:25 crc kubenswrapper[4784]: I1205 13:43:25.646479 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-bms4w" Dec 05 13:43:25 crc kubenswrapper[4784]: I1205 13:43:25.701492 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-bms4w" Dec 05 13:43:26 crc kubenswrapper[4784]: I1205 13:43:26.668651 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-bms4w" Dec 05 13:43:26 crc kubenswrapper[4784]: I1205 13:43:26.743782 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bms4w"] Dec 05 13:43:28 crc kubenswrapper[4784]: I1205 13:43:28.636165 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-bms4w" podUID="aa3374dc-9ead-4950-a6df-38109144f5de" containerName="registry-server" containerID="cri-o://c365454215bda5d2b2d769612a60898c04b3f0abbcd59425a04508949c94614e" gracePeriod=2 Dec 05 13:43:29 crc kubenswrapper[4784]: I1205 13:43:29.128841 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bms4w" Dec 05 13:43:29 crc kubenswrapper[4784]: I1205 13:43:29.264085 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa3374dc-9ead-4950-a6df-38109144f5de-utilities\") pod \"aa3374dc-9ead-4950-a6df-38109144f5de\" (UID: \"aa3374dc-9ead-4950-a6df-38109144f5de\") " Dec 05 13:43:29 crc kubenswrapper[4784]: I1205 13:43:29.264308 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa3374dc-9ead-4950-a6df-38109144f5de-catalog-content\") pod \"aa3374dc-9ead-4950-a6df-38109144f5de\" (UID: \"aa3374dc-9ead-4950-a6df-38109144f5de\") " Dec 05 13:43:29 crc kubenswrapper[4784]: I1205 13:43:29.264387 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fck6n\" (UniqueName: \"kubernetes.io/projected/aa3374dc-9ead-4950-a6df-38109144f5de-kube-api-access-fck6n\") pod \"aa3374dc-9ead-4950-a6df-38109144f5de\" (UID: \"aa3374dc-9ead-4950-a6df-38109144f5de\") " Dec 05 13:43:29 crc kubenswrapper[4784]: I1205 13:43:29.264919 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa3374dc-9ead-4950-a6df-38109144f5de-utilities" (OuterVolumeSpecName: "utilities") pod "aa3374dc-9ead-4950-a6df-38109144f5de" (UID: "aa3374dc-9ead-4950-a6df-38109144f5de"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:43:29 crc kubenswrapper[4784]: I1205 13:43:29.265277 4784 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa3374dc-9ead-4950-a6df-38109144f5de-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 13:43:29 crc kubenswrapper[4784]: I1205 13:43:29.275416 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa3374dc-9ead-4950-a6df-38109144f5de-kube-api-access-fck6n" (OuterVolumeSpecName: "kube-api-access-fck6n") pod "aa3374dc-9ead-4950-a6df-38109144f5de" (UID: "aa3374dc-9ead-4950-a6df-38109144f5de"). InnerVolumeSpecName "kube-api-access-fck6n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:43:29 crc kubenswrapper[4784]: I1205 13:43:29.283946 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa3374dc-9ead-4950-a6df-38109144f5de-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "aa3374dc-9ead-4950-a6df-38109144f5de" (UID: "aa3374dc-9ead-4950-a6df-38109144f5de"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:43:29 crc kubenswrapper[4784]: I1205 13:43:29.367370 4784 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa3374dc-9ead-4950-a6df-38109144f5de-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 13:43:29 crc kubenswrapper[4784]: I1205 13:43:29.367398 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fck6n\" (UniqueName: \"kubernetes.io/projected/aa3374dc-9ead-4950-a6df-38109144f5de-kube-api-access-fck6n\") on node \"crc\" DevicePath \"\"" Dec 05 13:43:29 crc kubenswrapper[4784]: I1205 13:43:29.649015 4784 generic.go:334] "Generic (PLEG): container finished" podID="aa3374dc-9ead-4950-a6df-38109144f5de" containerID="c365454215bda5d2b2d769612a60898c04b3f0abbcd59425a04508949c94614e" exitCode=0 Dec 05 13:43:29 crc kubenswrapper[4784]: I1205 13:43:29.649079 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bms4w" event={"ID":"aa3374dc-9ead-4950-a6df-38109144f5de","Type":"ContainerDied","Data":"c365454215bda5d2b2d769612a60898c04b3f0abbcd59425a04508949c94614e"} Dec 05 13:43:29 crc kubenswrapper[4784]: I1205 13:43:29.649352 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bms4w" event={"ID":"aa3374dc-9ead-4950-a6df-38109144f5de","Type":"ContainerDied","Data":"51a828deec5f04085cfaf3def7126b9d68d52ec01c872e05872a0e34d017186c"} Dec 05 13:43:29 crc kubenswrapper[4784]: I1205 13:43:29.649375 4784 scope.go:117] "RemoveContainer" containerID="c365454215bda5d2b2d769612a60898c04b3f0abbcd59425a04508949c94614e" Dec 05 13:43:29 crc kubenswrapper[4784]: I1205 13:43:29.649092 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bms4w" Dec 05 13:43:29 crc kubenswrapper[4784]: I1205 13:43:29.702509 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bms4w"] Dec 05 13:43:29 crc kubenswrapper[4784]: I1205 13:43:29.705557 4784 scope.go:117] "RemoveContainer" containerID="86986b6e68b7bb0b354971dec68880e4918f9b889ae598ad05c7a26c3cb47593" Dec 05 13:43:29 crc kubenswrapper[4784]: I1205 13:43:29.718731 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-bms4w"] Dec 05 13:43:29 crc kubenswrapper[4784]: I1205 13:43:29.744397 4784 scope.go:117] "RemoveContainer" containerID="35612fc02818263e58086c2274a2abe7e2d30e16bc0566deb1aff237837bb959" Dec 05 13:43:29 crc kubenswrapper[4784]: I1205 13:43:29.800319 4784 scope.go:117] "RemoveContainer" containerID="c365454215bda5d2b2d769612a60898c04b3f0abbcd59425a04508949c94614e" Dec 05 13:43:29 crc kubenswrapper[4784]: E1205 13:43:29.800747 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c365454215bda5d2b2d769612a60898c04b3f0abbcd59425a04508949c94614e\": container with ID starting with c365454215bda5d2b2d769612a60898c04b3f0abbcd59425a04508949c94614e not found: ID does not exist" containerID="c365454215bda5d2b2d769612a60898c04b3f0abbcd59425a04508949c94614e" Dec 05 13:43:29 crc kubenswrapper[4784]: I1205 13:43:29.800778 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c365454215bda5d2b2d769612a60898c04b3f0abbcd59425a04508949c94614e"} err="failed to get container status \"c365454215bda5d2b2d769612a60898c04b3f0abbcd59425a04508949c94614e\": rpc error: code = NotFound desc = could not find container \"c365454215bda5d2b2d769612a60898c04b3f0abbcd59425a04508949c94614e\": container with ID starting with c365454215bda5d2b2d769612a60898c04b3f0abbcd59425a04508949c94614e not found: ID does not exist" Dec 05 13:43:29 crc kubenswrapper[4784]: I1205 13:43:29.800801 4784 scope.go:117] "RemoveContainer" containerID="86986b6e68b7bb0b354971dec68880e4918f9b889ae598ad05c7a26c3cb47593" Dec 05 13:43:29 crc kubenswrapper[4784]: E1205 13:43:29.801391 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"86986b6e68b7bb0b354971dec68880e4918f9b889ae598ad05c7a26c3cb47593\": container with ID starting with 86986b6e68b7bb0b354971dec68880e4918f9b889ae598ad05c7a26c3cb47593 not found: ID does not exist" containerID="86986b6e68b7bb0b354971dec68880e4918f9b889ae598ad05c7a26c3cb47593" Dec 05 13:43:29 crc kubenswrapper[4784]: I1205 13:43:29.801445 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"86986b6e68b7bb0b354971dec68880e4918f9b889ae598ad05c7a26c3cb47593"} err="failed to get container status \"86986b6e68b7bb0b354971dec68880e4918f9b889ae598ad05c7a26c3cb47593\": rpc error: code = NotFound desc = could not find container \"86986b6e68b7bb0b354971dec68880e4918f9b889ae598ad05c7a26c3cb47593\": container with ID starting with 86986b6e68b7bb0b354971dec68880e4918f9b889ae598ad05c7a26c3cb47593 not found: ID does not exist" Dec 05 13:43:29 crc kubenswrapper[4784]: I1205 13:43:29.801476 4784 scope.go:117] "RemoveContainer" containerID="35612fc02818263e58086c2274a2abe7e2d30e16bc0566deb1aff237837bb959" Dec 05 13:43:29 crc kubenswrapper[4784]: E1205 13:43:29.801811 4784 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"35612fc02818263e58086c2274a2abe7e2d30e16bc0566deb1aff237837bb959\": container with ID starting with 35612fc02818263e58086c2274a2abe7e2d30e16bc0566deb1aff237837bb959 not found: ID does not exist" containerID="35612fc02818263e58086c2274a2abe7e2d30e16bc0566deb1aff237837bb959" Dec 05 13:43:29 crc kubenswrapper[4784]: I1205 13:43:29.801861 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"35612fc02818263e58086c2274a2abe7e2d30e16bc0566deb1aff237837bb959"} err="failed to get container status \"35612fc02818263e58086c2274a2abe7e2d30e16bc0566deb1aff237837bb959\": rpc error: code = NotFound desc = could not find container \"35612fc02818263e58086c2274a2abe7e2d30e16bc0566deb1aff237837bb959\": container with ID starting with 35612fc02818263e58086c2274a2abe7e2d30e16bc0566deb1aff237837bb959 not found: ID does not exist" Dec 05 13:43:31 crc kubenswrapper[4784]: I1205 13:43:31.015308 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa3374dc-9ead-4950-a6df-38109144f5de" path="/var/lib/kubelet/pods/aa3374dc-9ead-4950-a6df-38109144f5de/volumes" Dec 05 13:44:29 crc kubenswrapper[4784]: I1205 13:44:29.572285 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 13:44:29 crc kubenswrapper[4784]: I1205 13:44:29.573025 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 13:44:59 crc kubenswrapper[4784]: I1205 13:44:59.572118 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 13:44:59 crc kubenswrapper[4784]: I1205 13:44:59.572826 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 13:45:00 crc kubenswrapper[4784]: I1205 13:45:00.178783 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415705-jwrvk"] Dec 05 13:45:00 crc kubenswrapper[4784]: E1205 13:45:00.179366 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa3374dc-9ead-4950-a6df-38109144f5de" containerName="extract-utilities" Dec 05 13:45:00 crc kubenswrapper[4784]: I1205 13:45:00.179388 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa3374dc-9ead-4950-a6df-38109144f5de" containerName="extract-utilities" Dec 05 13:45:00 crc kubenswrapper[4784]: E1205 13:45:00.179407 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa3374dc-9ead-4950-a6df-38109144f5de" containerName="extract-content" Dec 05 13:45:00 crc kubenswrapper[4784]: I1205 13:45:00.179415 4784 
state_mem.go:107] "Deleted CPUSet assignment" podUID="aa3374dc-9ead-4950-a6df-38109144f5de" containerName="extract-content" Dec 05 13:45:00 crc kubenswrapper[4784]: E1205 13:45:00.179454 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa3374dc-9ead-4950-a6df-38109144f5de" containerName="registry-server" Dec 05 13:45:00 crc kubenswrapper[4784]: I1205 13:45:00.179462 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa3374dc-9ead-4950-a6df-38109144f5de" containerName="registry-server" Dec 05 13:45:00 crc kubenswrapper[4784]: I1205 13:45:00.179695 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa3374dc-9ead-4950-a6df-38109144f5de" containerName="registry-server" Dec 05 13:45:00 crc kubenswrapper[4784]: I1205 13:45:00.180645 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415705-jwrvk" Dec 05 13:45:00 crc kubenswrapper[4784]: I1205 13:45:00.188943 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 13:45:00 crc kubenswrapper[4784]: I1205 13:45:00.189317 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 13:45:00 crc kubenswrapper[4784]: I1205 13:45:00.192384 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415705-jwrvk"] Dec 05 13:45:00 crc kubenswrapper[4784]: I1205 13:45:00.298685 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/94bf0c80-fef0-47d2-84c6-8e18df739ae2-secret-volume\") pod \"collect-profiles-29415705-jwrvk\" (UID: \"94bf0c80-fef0-47d2-84c6-8e18df739ae2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415705-jwrvk" Dec 05 13:45:00 crc kubenswrapper[4784]: I1205 13:45:00.298773 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/94bf0c80-fef0-47d2-84c6-8e18df739ae2-config-volume\") pod \"collect-profiles-29415705-jwrvk\" (UID: \"94bf0c80-fef0-47d2-84c6-8e18df739ae2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415705-jwrvk" Dec 05 13:45:00 crc kubenswrapper[4784]: I1205 13:45:00.298820 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d6d7l\" (UniqueName: \"kubernetes.io/projected/94bf0c80-fef0-47d2-84c6-8e18df739ae2-kube-api-access-d6d7l\") pod \"collect-profiles-29415705-jwrvk\" (UID: \"94bf0c80-fef0-47d2-84c6-8e18df739ae2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415705-jwrvk" Dec 05 13:45:00 crc kubenswrapper[4784]: I1205 13:45:00.401546 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/94bf0c80-fef0-47d2-84c6-8e18df739ae2-secret-volume\") pod \"collect-profiles-29415705-jwrvk\" (UID: \"94bf0c80-fef0-47d2-84c6-8e18df739ae2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415705-jwrvk" Dec 05 13:45:00 crc kubenswrapper[4784]: I1205 13:45:00.401656 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/94bf0c80-fef0-47d2-84c6-8e18df739ae2-config-volume\") pod 
\"collect-profiles-29415705-jwrvk\" (UID: \"94bf0c80-fef0-47d2-84c6-8e18df739ae2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415705-jwrvk" Dec 05 13:45:00 crc kubenswrapper[4784]: I1205 13:45:00.401715 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d6d7l\" (UniqueName: \"kubernetes.io/projected/94bf0c80-fef0-47d2-84c6-8e18df739ae2-kube-api-access-d6d7l\") pod \"collect-profiles-29415705-jwrvk\" (UID: \"94bf0c80-fef0-47d2-84c6-8e18df739ae2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415705-jwrvk" Dec 05 13:45:00 crc kubenswrapper[4784]: I1205 13:45:00.404542 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/94bf0c80-fef0-47d2-84c6-8e18df739ae2-config-volume\") pod \"collect-profiles-29415705-jwrvk\" (UID: \"94bf0c80-fef0-47d2-84c6-8e18df739ae2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415705-jwrvk" Dec 05 13:45:00 crc kubenswrapper[4784]: I1205 13:45:00.412323 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/94bf0c80-fef0-47d2-84c6-8e18df739ae2-secret-volume\") pod \"collect-profiles-29415705-jwrvk\" (UID: \"94bf0c80-fef0-47d2-84c6-8e18df739ae2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415705-jwrvk" Dec 05 13:45:00 crc kubenswrapper[4784]: I1205 13:45:00.427624 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d6d7l\" (UniqueName: \"kubernetes.io/projected/94bf0c80-fef0-47d2-84c6-8e18df739ae2-kube-api-access-d6d7l\") pod \"collect-profiles-29415705-jwrvk\" (UID: \"94bf0c80-fef0-47d2-84c6-8e18df739ae2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415705-jwrvk" Dec 05 13:45:00 crc kubenswrapper[4784]: I1205 13:45:00.508927 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415705-jwrvk" Dec 05 13:45:00 crc kubenswrapper[4784]: I1205 13:45:00.962098 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415705-jwrvk"] Dec 05 13:45:01 crc kubenswrapper[4784]: I1205 13:45:01.625593 4784 generic.go:334] "Generic (PLEG): container finished" podID="94bf0c80-fef0-47d2-84c6-8e18df739ae2" containerID="31ad6c89bedc6ad76af135f13d8517fcfc71720848e152d24799914685c38b3a" exitCode=0 Dec 05 13:45:01 crc kubenswrapper[4784]: I1205 13:45:01.625646 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415705-jwrvk" event={"ID":"94bf0c80-fef0-47d2-84c6-8e18df739ae2","Type":"ContainerDied","Data":"31ad6c89bedc6ad76af135f13d8517fcfc71720848e152d24799914685c38b3a"} Dec 05 13:45:01 crc kubenswrapper[4784]: I1205 13:45:01.625845 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415705-jwrvk" event={"ID":"94bf0c80-fef0-47d2-84c6-8e18df739ae2","Type":"ContainerStarted","Data":"7226b4a52dc465ce1f3729e5428ac6511b69a7a657a6c15c043e49d5124aa156"} Dec 05 13:45:03 crc kubenswrapper[4784]: I1205 13:45:03.012631 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415705-jwrvk" Dec 05 13:45:03 crc kubenswrapper[4784]: I1205 13:45:03.164689 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/94bf0c80-fef0-47d2-84c6-8e18df739ae2-secret-volume\") pod \"94bf0c80-fef0-47d2-84c6-8e18df739ae2\" (UID: \"94bf0c80-fef0-47d2-84c6-8e18df739ae2\") " Dec 05 13:45:03 crc kubenswrapper[4784]: I1205 13:45:03.165309 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/94bf0c80-fef0-47d2-84c6-8e18df739ae2-config-volume\") pod \"94bf0c80-fef0-47d2-84c6-8e18df739ae2\" (UID: \"94bf0c80-fef0-47d2-84c6-8e18df739ae2\") " Dec 05 13:45:03 crc kubenswrapper[4784]: I1205 13:45:03.165358 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6d7l\" (UniqueName: \"kubernetes.io/projected/94bf0c80-fef0-47d2-84c6-8e18df739ae2-kube-api-access-d6d7l\") pod \"94bf0c80-fef0-47d2-84c6-8e18df739ae2\" (UID: \"94bf0c80-fef0-47d2-84c6-8e18df739ae2\") " Dec 05 13:45:03 crc kubenswrapper[4784]: I1205 13:45:03.165735 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/94bf0c80-fef0-47d2-84c6-8e18df739ae2-config-volume" (OuterVolumeSpecName: "config-volume") pod "94bf0c80-fef0-47d2-84c6-8e18df739ae2" (UID: "94bf0c80-fef0-47d2-84c6-8e18df739ae2"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 13:45:03 crc kubenswrapper[4784]: I1205 13:45:03.166587 4784 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/94bf0c80-fef0-47d2-84c6-8e18df739ae2-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 13:45:03 crc kubenswrapper[4784]: I1205 13:45:03.170675 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94bf0c80-fef0-47d2-84c6-8e18df739ae2-kube-api-access-d6d7l" (OuterVolumeSpecName: "kube-api-access-d6d7l") pod "94bf0c80-fef0-47d2-84c6-8e18df739ae2" (UID: "94bf0c80-fef0-47d2-84c6-8e18df739ae2"). InnerVolumeSpecName "kube-api-access-d6d7l". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:45:03 crc kubenswrapper[4784]: I1205 13:45:03.174398 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94bf0c80-fef0-47d2-84c6-8e18df739ae2-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "94bf0c80-fef0-47d2-84c6-8e18df739ae2" (UID: "94bf0c80-fef0-47d2-84c6-8e18df739ae2"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 13:45:03 crc kubenswrapper[4784]: I1205 13:45:03.268437 4784 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/94bf0c80-fef0-47d2-84c6-8e18df739ae2-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 13:45:03 crc kubenswrapper[4784]: I1205 13:45:03.268474 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6d7l\" (UniqueName: \"kubernetes.io/projected/94bf0c80-fef0-47d2-84c6-8e18df739ae2-kube-api-access-d6d7l\") on node \"crc\" DevicePath \"\"" Dec 05 13:45:03 crc kubenswrapper[4784]: I1205 13:45:03.651429 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415705-jwrvk" event={"ID":"94bf0c80-fef0-47d2-84c6-8e18df739ae2","Type":"ContainerDied","Data":"7226b4a52dc465ce1f3729e5428ac6511b69a7a657a6c15c043e49d5124aa156"} Dec 05 13:45:03 crc kubenswrapper[4784]: I1205 13:45:03.651467 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7226b4a52dc465ce1f3729e5428ac6511b69a7a657a6c15c043e49d5124aa156" Dec 05 13:45:03 crc kubenswrapper[4784]: I1205 13:45:03.651482 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415705-jwrvk" Dec 05 13:45:04 crc kubenswrapper[4784]: I1205 13:45:04.084370 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415660-jbvrh"] Dec 05 13:45:04 crc kubenswrapper[4784]: I1205 13:45:04.094167 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415660-jbvrh"] Dec 05 13:45:05 crc kubenswrapper[4784]: I1205 13:45:05.027509 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65395f2b-fd20-43e9-860e-3a35033375bf" path="/var/lib/kubelet/pods/65395f2b-fd20-43e9-860e-3a35033375bf/volumes" Dec 05 13:45:29 crc kubenswrapper[4784]: I1205 13:45:29.572834 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 13:45:29 crc kubenswrapper[4784]: I1205 13:45:29.573508 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 13:45:29 crc kubenswrapper[4784]: I1205 13:45:29.573557 4784 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" Dec 05 13:45:29 crc kubenswrapper[4784]: I1205 13:45:29.574158 4784 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8906b04b01f65ffa3ebdfad47753606541d381ba8d87566cc51aba9d2d2a14a4"} pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 13:45:29 crc kubenswrapper[4784]: I1205 13:45:29.574236 4784 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" containerID="cri-o://8906b04b01f65ffa3ebdfad47753606541d381ba8d87566cc51aba9d2d2a14a4" gracePeriod=600 Dec 05 13:45:30 crc kubenswrapper[4784]: I1205 13:45:30.102051 4784 generic.go:334] "Generic (PLEG): container finished" podID="be412f31-7a36-4811-8914-be8cdc987d08" containerID="8906b04b01f65ffa3ebdfad47753606541d381ba8d87566cc51aba9d2d2a14a4" exitCode=0 Dec 05 13:45:30 crc kubenswrapper[4784]: I1205 13:45:30.102116 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerDied","Data":"8906b04b01f65ffa3ebdfad47753606541d381ba8d87566cc51aba9d2d2a14a4"} Dec 05 13:45:30 crc kubenswrapper[4784]: I1205 13:45:30.102487 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerStarted","Data":"f57a741206f67acf68eaef09a87dc2b6478253afd299f7f2034ad24699c49dc6"} Dec 05 13:45:30 crc kubenswrapper[4784]: I1205 13:45:30.102514 4784 scope.go:117] "RemoveContainer" containerID="17364ba8ca6f9c9e7dab8082eb826a2beef6b231c3f26dfd80166ed3425508a1" Dec 05 13:45:40 crc kubenswrapper[4784]: I1205 13:45:40.339397 4784 scope.go:117] "RemoveContainer" containerID="f38242b277a2a8c10172715feb9edc687484e10ab61959c758fa871365ec3da6" Dec 05 13:47:29 crc kubenswrapper[4784]: I1205 13:47:29.572542 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 13:47:29 crc kubenswrapper[4784]: I1205 13:47:29.573082 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 13:47:59 crc kubenswrapper[4784]: I1205 13:47:59.572890 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 13:47:59 crc kubenswrapper[4784]: I1205 13:47:59.573407 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 13:48:29 crc kubenswrapper[4784]: I1205 13:48:29.573063 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 13:48:29 crc kubenswrapper[4784]: I1205 13:48:29.573695 4784 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 13:48:29 crc kubenswrapper[4784]: I1205 13:48:29.573746 4784 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" Dec 05 13:48:29 crc kubenswrapper[4784]: I1205 13:48:29.574736 4784 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f57a741206f67acf68eaef09a87dc2b6478253afd299f7f2034ad24699c49dc6"} pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 13:48:29 crc kubenswrapper[4784]: I1205 13:48:29.574815 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" containerID="cri-o://f57a741206f67acf68eaef09a87dc2b6478253afd299f7f2034ad24699c49dc6" gracePeriod=600 Dec 05 13:48:29 crc kubenswrapper[4784]: I1205 13:48:29.996236 4784 generic.go:334] "Generic (PLEG): container finished" podID="be412f31-7a36-4811-8914-be8cdc987d08" containerID="f57a741206f67acf68eaef09a87dc2b6478253afd299f7f2034ad24699c49dc6" exitCode=0 Dec 05 13:48:29 crc kubenswrapper[4784]: I1205 13:48:29.996309 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerDied","Data":"f57a741206f67acf68eaef09a87dc2b6478253afd299f7f2034ad24699c49dc6"} Dec 05 13:48:29 crc kubenswrapper[4784]: I1205 13:48:29.996693 4784 scope.go:117] "RemoveContainer" containerID="8906b04b01f65ffa3ebdfad47753606541d381ba8d87566cc51aba9d2d2a14a4" Dec 05 13:48:30 crc kubenswrapper[4784]: E1205 13:48:30.211742 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:48:31 crc kubenswrapper[4784]: I1205 13:48:31.010141 4784 scope.go:117] "RemoveContainer" containerID="f57a741206f67acf68eaef09a87dc2b6478253afd299f7f2034ad24699c49dc6" Dec 05 13:48:31 crc kubenswrapper[4784]: E1205 13:48:31.010690 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:48:41 crc kubenswrapper[4784]: I1205 13:48:41.999042 4784 scope.go:117] "RemoveContainer" containerID="f57a741206f67acf68eaef09a87dc2b6478253afd299f7f2034ad24699c49dc6" Dec 05 13:48:42 crc kubenswrapper[4784]: E1205 13:48:41.999981 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed 
to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:48:52 crc kubenswrapper[4784]: I1205 13:48:52.999024 4784 scope.go:117] "RemoveContainer" containerID="f57a741206f67acf68eaef09a87dc2b6478253afd299f7f2034ad24699c49dc6" Dec 05 13:48:53 crc kubenswrapper[4784]: E1205 13:48:53.000300 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:49:04 crc kubenswrapper[4784]: I1205 13:49:03.999680 4784 scope.go:117] "RemoveContainer" containerID="f57a741206f67acf68eaef09a87dc2b6478253afd299f7f2034ad24699c49dc6" Dec 05 13:49:04 crc kubenswrapper[4784]: E1205 13:49:04.000655 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:49:14 crc kubenswrapper[4784]: E1205 13:49:14.524041 4784 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.223:56552->38.102.83.223:44671: write tcp 38.102.83.223:56552->38.102.83.223:44671: write: connection reset by peer Dec 05 13:49:15 crc kubenswrapper[4784]: I1205 13:49:15.998460 4784 scope.go:117] "RemoveContainer" containerID="f57a741206f67acf68eaef09a87dc2b6478253afd299f7f2034ad24699c49dc6" Dec 05 13:49:15 crc kubenswrapper[4784]: E1205 13:49:15.998714 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:49:31 crc kubenswrapper[4784]: I1205 13:49:31.013180 4784 scope.go:117] "RemoveContainer" containerID="f57a741206f67acf68eaef09a87dc2b6478253afd299f7f2034ad24699c49dc6" Dec 05 13:49:31 crc kubenswrapper[4784]: E1205 13:49:31.014163 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:49:41 crc kubenswrapper[4784]: I1205 13:49:41.999237 4784 scope.go:117] "RemoveContainer" containerID="f57a741206f67acf68eaef09a87dc2b6478253afd299f7f2034ad24699c49dc6" Dec 05 13:49:42 crc kubenswrapper[4784]: E1205 
13:49:41.999925 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:49:53 crc kubenswrapper[4784]: I1205 13:49:52.999617 4784 scope.go:117] "RemoveContainer" containerID="f57a741206f67acf68eaef09a87dc2b6478253afd299f7f2034ad24699c49dc6" Dec 05 13:49:53 crc kubenswrapper[4784]: E1205 13:49:53.000457 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:50:08 crc kubenswrapper[4784]: I1205 13:50:07.999049 4784 scope.go:117] "RemoveContainer" containerID="f57a741206f67acf68eaef09a87dc2b6478253afd299f7f2034ad24699c49dc6" Dec 05 13:50:08 crc kubenswrapper[4784]: E1205 13:50:07.999997 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:50:23 crc kubenswrapper[4784]: I1205 13:50:22.999672 4784 scope.go:117] "RemoveContainer" containerID="f57a741206f67acf68eaef09a87dc2b6478253afd299f7f2034ad24699c49dc6" Dec 05 13:50:23 crc kubenswrapper[4784]: E1205 13:50:23.000613 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:50:36 crc kubenswrapper[4784]: I1205 13:50:35.999627 4784 scope.go:117] "RemoveContainer" containerID="f57a741206f67acf68eaef09a87dc2b6478253afd299f7f2034ad24699c49dc6" Dec 05 13:50:36 crc kubenswrapper[4784]: E1205 13:50:36.000583 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:50:49 crc kubenswrapper[4784]: I1205 13:50:49.998928 4784 scope.go:117] "RemoveContainer" containerID="f57a741206f67acf68eaef09a87dc2b6478253afd299f7f2034ad24699c49dc6" Dec 05 13:50:50 crc kubenswrapper[4784]: E1205 13:50:49.999742 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:51:01 crc kubenswrapper[4784]: I1205 13:51:01.019204 4784 scope.go:117] "RemoveContainer" containerID="f57a741206f67acf68eaef09a87dc2b6478253afd299f7f2034ad24699c49dc6" Dec 05 13:51:01 crc kubenswrapper[4784]: E1205 13:51:01.020032 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:51:16 crc kubenswrapper[4784]: I1205 13:51:15.999747 4784 scope.go:117] "RemoveContainer" containerID="f57a741206f67acf68eaef09a87dc2b6478253afd299f7f2034ad24699c49dc6" Dec 05 13:51:16 crc kubenswrapper[4784]: E1205 13:51:16.000710 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:51:31 crc kubenswrapper[4784]: I1205 13:51:31.019509 4784 scope.go:117] "RemoveContainer" containerID="f57a741206f67acf68eaef09a87dc2b6478253afd299f7f2034ad24699c49dc6" Dec 05 13:51:31 crc kubenswrapper[4784]: E1205 13:51:31.021009 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:51:43 crc kubenswrapper[4784]: I1205 13:51:43.999330 4784 scope.go:117] "RemoveContainer" containerID="f57a741206f67acf68eaef09a87dc2b6478253afd299f7f2034ad24699c49dc6" Dec 05 13:51:44 crc kubenswrapper[4784]: E1205 13:51:44.000228 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:51:59 crc kubenswrapper[4784]: I1205 13:51:58.999727 4784 scope.go:117] "RemoveContainer" containerID="f57a741206f67acf68eaef09a87dc2b6478253afd299f7f2034ad24699c49dc6" Dec 05 13:51:59 crc kubenswrapper[4784]: E1205 13:51:59.001063 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:52:09 crc kubenswrapper[4784]: I1205 13:52:09.999178 4784 scope.go:117] "RemoveContainer" containerID="f57a741206f67acf68eaef09a87dc2b6478253afd299f7f2034ad24699c49dc6" Dec 05 13:52:10 crc kubenswrapper[4784]: E1205 13:52:09.999983 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:52:12 crc kubenswrapper[4784]: I1205 13:52:12.967942 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-9gwjp"] Dec 05 13:52:12 crc kubenswrapper[4784]: E1205 13:52:12.969959 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94bf0c80-fef0-47d2-84c6-8e18df739ae2" containerName="collect-profiles" Dec 05 13:52:12 crc kubenswrapper[4784]: I1205 13:52:12.969996 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="94bf0c80-fef0-47d2-84c6-8e18df739ae2" containerName="collect-profiles" Dec 05 13:52:12 crc kubenswrapper[4784]: I1205 13:52:12.970563 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="94bf0c80-fef0-47d2-84c6-8e18df739ae2" containerName="collect-profiles" Dec 05 13:52:12 crc kubenswrapper[4784]: I1205 13:52:12.974718 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9gwjp" Dec 05 13:52:13 crc kubenswrapper[4784]: I1205 13:52:13.014026 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9gwjp"] Dec 05 13:52:13 crc kubenswrapper[4784]: I1205 13:52:13.074874 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eec33a98-4519-4b27-a3b0-fd8433e550d0-utilities\") pod \"certified-operators-9gwjp\" (UID: \"eec33a98-4519-4b27-a3b0-fd8433e550d0\") " pod="openshift-marketplace/certified-operators-9gwjp" Dec 05 13:52:13 crc kubenswrapper[4784]: I1205 13:52:13.074917 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eec33a98-4519-4b27-a3b0-fd8433e550d0-catalog-content\") pod \"certified-operators-9gwjp\" (UID: \"eec33a98-4519-4b27-a3b0-fd8433e550d0\") " pod="openshift-marketplace/certified-operators-9gwjp" Dec 05 13:52:13 crc kubenswrapper[4784]: I1205 13:52:13.074996 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wtznm\" (UniqueName: \"kubernetes.io/projected/eec33a98-4519-4b27-a3b0-fd8433e550d0-kube-api-access-wtznm\") pod \"certified-operators-9gwjp\" (UID: \"eec33a98-4519-4b27-a3b0-fd8433e550d0\") " pod="openshift-marketplace/certified-operators-9gwjp" Dec 05 13:52:13 crc kubenswrapper[4784]: I1205 13:52:13.176627 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eec33a98-4519-4b27-a3b0-fd8433e550d0-utilities\") pod \"certified-operators-9gwjp\" (UID: \"eec33a98-4519-4b27-a3b0-fd8433e550d0\") " 
pod="openshift-marketplace/certified-operators-9gwjp" Dec 05 13:52:13 crc kubenswrapper[4784]: I1205 13:52:13.176675 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eec33a98-4519-4b27-a3b0-fd8433e550d0-catalog-content\") pod \"certified-operators-9gwjp\" (UID: \"eec33a98-4519-4b27-a3b0-fd8433e550d0\") " pod="openshift-marketplace/certified-operators-9gwjp" Dec 05 13:52:13 crc kubenswrapper[4784]: I1205 13:52:13.176741 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wtznm\" (UniqueName: \"kubernetes.io/projected/eec33a98-4519-4b27-a3b0-fd8433e550d0-kube-api-access-wtznm\") pod \"certified-operators-9gwjp\" (UID: \"eec33a98-4519-4b27-a3b0-fd8433e550d0\") " pod="openshift-marketplace/certified-operators-9gwjp" Dec 05 13:52:13 crc kubenswrapper[4784]: I1205 13:52:13.177531 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eec33a98-4519-4b27-a3b0-fd8433e550d0-utilities\") pod \"certified-operators-9gwjp\" (UID: \"eec33a98-4519-4b27-a3b0-fd8433e550d0\") " pod="openshift-marketplace/certified-operators-9gwjp" Dec 05 13:52:13 crc kubenswrapper[4784]: I1205 13:52:13.177763 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eec33a98-4519-4b27-a3b0-fd8433e550d0-catalog-content\") pod \"certified-operators-9gwjp\" (UID: \"eec33a98-4519-4b27-a3b0-fd8433e550d0\") " pod="openshift-marketplace/certified-operators-9gwjp" Dec 05 13:52:13 crc kubenswrapper[4784]: I1205 13:52:13.196440 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wtznm\" (UniqueName: \"kubernetes.io/projected/eec33a98-4519-4b27-a3b0-fd8433e550d0-kube-api-access-wtznm\") pod \"certified-operators-9gwjp\" (UID: \"eec33a98-4519-4b27-a3b0-fd8433e550d0\") " pod="openshift-marketplace/certified-operators-9gwjp" Dec 05 13:52:13 crc kubenswrapper[4784]: I1205 13:52:13.300155 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-9gwjp" Dec 05 13:52:13 crc kubenswrapper[4784]: I1205 13:52:13.882215 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9gwjp"] Dec 05 13:52:13 crc kubenswrapper[4784]: W1205 13:52:13.890380 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeec33a98_4519_4b27_a3b0_fd8433e550d0.slice/crio-885b73962b410aa759fc6f44933d3472b7acf8ffbabaa01db3d55812f6227c0d WatchSource:0}: Error finding container 885b73962b410aa759fc6f44933d3472b7acf8ffbabaa01db3d55812f6227c0d: Status 404 returned error can't find the container with id 885b73962b410aa759fc6f44933d3472b7acf8ffbabaa01db3d55812f6227c0d Dec 05 13:52:14 crc kubenswrapper[4784]: I1205 13:52:14.316928 4784 generic.go:334] "Generic (PLEG): container finished" podID="eec33a98-4519-4b27-a3b0-fd8433e550d0" containerID="7e7f11c96dd268136802cd38cb20dd846ea34f5670b21fe90a55cc6439461471" exitCode=0 Dec 05 13:52:14 crc kubenswrapper[4784]: I1205 13:52:14.317070 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9gwjp" event={"ID":"eec33a98-4519-4b27-a3b0-fd8433e550d0","Type":"ContainerDied","Data":"7e7f11c96dd268136802cd38cb20dd846ea34f5670b21fe90a55cc6439461471"} Dec 05 13:52:14 crc kubenswrapper[4784]: I1205 13:52:14.317470 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9gwjp" event={"ID":"eec33a98-4519-4b27-a3b0-fd8433e550d0","Type":"ContainerStarted","Data":"885b73962b410aa759fc6f44933d3472b7acf8ffbabaa01db3d55812f6227c0d"} Dec 05 13:52:14 crc kubenswrapper[4784]: I1205 13:52:14.321033 4784 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 13:52:16 crc kubenswrapper[4784]: I1205 13:52:16.338463 4784 generic.go:334] "Generic (PLEG): container finished" podID="eec33a98-4519-4b27-a3b0-fd8433e550d0" containerID="693dd3e92ce4b0dac7d19a723df987cebce019db18fca3eb092096ddc1786008" exitCode=0 Dec 05 13:52:16 crc kubenswrapper[4784]: I1205 13:52:16.338562 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9gwjp" event={"ID":"eec33a98-4519-4b27-a3b0-fd8433e550d0","Type":"ContainerDied","Data":"693dd3e92ce4b0dac7d19a723df987cebce019db18fca3eb092096ddc1786008"} Dec 05 13:52:17 crc kubenswrapper[4784]: I1205 13:52:17.352367 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9gwjp" event={"ID":"eec33a98-4519-4b27-a3b0-fd8433e550d0","Type":"ContainerStarted","Data":"f710bcd6030405f8c1ecd99cb623d00f413b512af8b5ffce29abf7b98c51f16a"} Dec 05 13:52:17 crc kubenswrapper[4784]: I1205 13:52:17.387042 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-9gwjp" podStartSLOduration=2.92686747 podStartE2EDuration="5.387010112s" podCreationTimestamp="2025-12-05 13:52:12 +0000 UTC" firstStartedPulling="2025-12-05 13:52:14.320484433 +0000 UTC m=+5213.740551288" lastFinishedPulling="2025-12-05 13:52:16.780627095 +0000 UTC m=+5216.200693930" observedRunningTime="2025-12-05 13:52:17.378551628 +0000 UTC m=+5216.798618433" watchObservedRunningTime="2025-12-05 13:52:17.387010112 +0000 UTC m=+5216.807076967" Dec 05 13:52:23 crc kubenswrapper[4784]: I1205 13:52:23.300871 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/certified-operators-9gwjp" Dec 05 13:52:23 crc kubenswrapper[4784]: I1205 13:52:23.301774 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-9gwjp" Dec 05 13:52:23 crc kubenswrapper[4784]: I1205 13:52:23.350702 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-9gwjp" Dec 05 13:52:23 crc kubenswrapper[4784]: I1205 13:52:23.449472 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-9gwjp" Dec 05 13:52:23 crc kubenswrapper[4784]: I1205 13:52:23.604356 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9gwjp"] Dec 05 13:52:24 crc kubenswrapper[4784]: I1205 13:52:24.998745 4784 scope.go:117] "RemoveContainer" containerID="f57a741206f67acf68eaef09a87dc2b6478253afd299f7f2034ad24699c49dc6" Dec 05 13:52:24 crc kubenswrapper[4784]: E1205 13:52:24.999512 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:52:25 crc kubenswrapper[4784]: I1205 13:52:25.428233 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-9gwjp" podUID="eec33a98-4519-4b27-a3b0-fd8433e550d0" containerName="registry-server" containerID="cri-o://f710bcd6030405f8c1ecd99cb623d00f413b512af8b5ffce29abf7b98c51f16a" gracePeriod=2 Dec 05 13:52:25 crc kubenswrapper[4784]: I1205 13:52:25.892697 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9gwjp" Dec 05 13:52:25 crc kubenswrapper[4784]: I1205 13:52:25.976554 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eec33a98-4519-4b27-a3b0-fd8433e550d0-utilities\") pod \"eec33a98-4519-4b27-a3b0-fd8433e550d0\" (UID: \"eec33a98-4519-4b27-a3b0-fd8433e550d0\") " Dec 05 13:52:25 crc kubenswrapper[4784]: I1205 13:52:25.976889 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eec33a98-4519-4b27-a3b0-fd8433e550d0-catalog-content\") pod \"eec33a98-4519-4b27-a3b0-fd8433e550d0\" (UID: \"eec33a98-4519-4b27-a3b0-fd8433e550d0\") " Dec 05 13:52:25 crc kubenswrapper[4784]: I1205 13:52:25.977059 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wtznm\" (UniqueName: \"kubernetes.io/projected/eec33a98-4519-4b27-a3b0-fd8433e550d0-kube-api-access-wtznm\") pod \"eec33a98-4519-4b27-a3b0-fd8433e550d0\" (UID: \"eec33a98-4519-4b27-a3b0-fd8433e550d0\") " Dec 05 13:52:25 crc kubenswrapper[4784]: I1205 13:52:25.977608 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eec33a98-4519-4b27-a3b0-fd8433e550d0-utilities" (OuterVolumeSpecName: "utilities") pod "eec33a98-4519-4b27-a3b0-fd8433e550d0" (UID: "eec33a98-4519-4b27-a3b0-fd8433e550d0"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:52:25 crc kubenswrapper[4784]: I1205 13:52:25.977913 4784 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eec33a98-4519-4b27-a3b0-fd8433e550d0-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 13:52:25 crc kubenswrapper[4784]: I1205 13:52:25.988681 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eec33a98-4519-4b27-a3b0-fd8433e550d0-kube-api-access-wtznm" (OuterVolumeSpecName: "kube-api-access-wtznm") pod "eec33a98-4519-4b27-a3b0-fd8433e550d0" (UID: "eec33a98-4519-4b27-a3b0-fd8433e550d0"). InnerVolumeSpecName "kube-api-access-wtznm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:52:26 crc kubenswrapper[4784]: I1205 13:52:26.080253 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wtznm\" (UniqueName: \"kubernetes.io/projected/eec33a98-4519-4b27-a3b0-fd8433e550d0-kube-api-access-wtznm\") on node \"crc\" DevicePath \"\"" Dec 05 13:52:26 crc kubenswrapper[4784]: I1205 13:52:26.347109 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eec33a98-4519-4b27-a3b0-fd8433e550d0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "eec33a98-4519-4b27-a3b0-fd8433e550d0" (UID: "eec33a98-4519-4b27-a3b0-fd8433e550d0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:52:26 crc kubenswrapper[4784]: I1205 13:52:26.386312 4784 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eec33a98-4519-4b27-a3b0-fd8433e550d0-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 13:52:26 crc kubenswrapper[4784]: I1205 13:52:26.437114 4784 generic.go:334] "Generic (PLEG): container finished" podID="eec33a98-4519-4b27-a3b0-fd8433e550d0" containerID="f710bcd6030405f8c1ecd99cb623d00f413b512af8b5ffce29abf7b98c51f16a" exitCode=0 Dec 05 13:52:26 crc kubenswrapper[4784]: I1205 13:52:26.437155 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9gwjp" event={"ID":"eec33a98-4519-4b27-a3b0-fd8433e550d0","Type":"ContainerDied","Data":"f710bcd6030405f8c1ecd99cb623d00f413b512af8b5ffce29abf7b98c51f16a"} Dec 05 13:52:26 crc kubenswrapper[4784]: I1205 13:52:26.437179 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9gwjp" event={"ID":"eec33a98-4519-4b27-a3b0-fd8433e550d0","Type":"ContainerDied","Data":"885b73962b410aa759fc6f44933d3472b7acf8ffbabaa01db3d55812f6227c0d"} Dec 05 13:52:26 crc kubenswrapper[4784]: I1205 13:52:26.437215 4784 scope.go:117] "RemoveContainer" containerID="f710bcd6030405f8c1ecd99cb623d00f413b512af8b5ffce29abf7b98c51f16a" Dec 05 13:52:26 crc kubenswrapper[4784]: I1205 13:52:26.437333 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-9gwjp" Dec 05 13:52:26 crc kubenswrapper[4784]: I1205 13:52:26.469046 4784 scope.go:117] "RemoveContainer" containerID="693dd3e92ce4b0dac7d19a723df987cebce019db18fca3eb092096ddc1786008" Dec 05 13:52:26 crc kubenswrapper[4784]: I1205 13:52:26.473969 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9gwjp"] Dec 05 13:52:26 crc kubenswrapper[4784]: I1205 13:52:26.488783 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-9gwjp"] Dec 05 13:52:26 crc kubenswrapper[4784]: I1205 13:52:26.497112 4784 scope.go:117] "RemoveContainer" containerID="7e7f11c96dd268136802cd38cb20dd846ea34f5670b21fe90a55cc6439461471" Dec 05 13:52:26 crc kubenswrapper[4784]: I1205 13:52:26.545888 4784 scope.go:117] "RemoveContainer" containerID="f710bcd6030405f8c1ecd99cb623d00f413b512af8b5ffce29abf7b98c51f16a" Dec 05 13:52:26 crc kubenswrapper[4784]: E1205 13:52:26.547240 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f710bcd6030405f8c1ecd99cb623d00f413b512af8b5ffce29abf7b98c51f16a\": container with ID starting with f710bcd6030405f8c1ecd99cb623d00f413b512af8b5ffce29abf7b98c51f16a not found: ID does not exist" containerID="f710bcd6030405f8c1ecd99cb623d00f413b512af8b5ffce29abf7b98c51f16a" Dec 05 13:52:26 crc kubenswrapper[4784]: I1205 13:52:26.547293 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f710bcd6030405f8c1ecd99cb623d00f413b512af8b5ffce29abf7b98c51f16a"} err="failed to get container status \"f710bcd6030405f8c1ecd99cb623d00f413b512af8b5ffce29abf7b98c51f16a\": rpc error: code = NotFound desc = could not find container \"f710bcd6030405f8c1ecd99cb623d00f413b512af8b5ffce29abf7b98c51f16a\": container with ID starting with f710bcd6030405f8c1ecd99cb623d00f413b512af8b5ffce29abf7b98c51f16a not found: ID does not exist" Dec 05 13:52:26 crc kubenswrapper[4784]: I1205 13:52:26.547322 4784 scope.go:117] "RemoveContainer" containerID="693dd3e92ce4b0dac7d19a723df987cebce019db18fca3eb092096ddc1786008" Dec 05 13:52:26 crc kubenswrapper[4784]: E1205 13:52:26.547638 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"693dd3e92ce4b0dac7d19a723df987cebce019db18fca3eb092096ddc1786008\": container with ID starting with 693dd3e92ce4b0dac7d19a723df987cebce019db18fca3eb092096ddc1786008 not found: ID does not exist" containerID="693dd3e92ce4b0dac7d19a723df987cebce019db18fca3eb092096ddc1786008" Dec 05 13:52:26 crc kubenswrapper[4784]: I1205 13:52:26.547663 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"693dd3e92ce4b0dac7d19a723df987cebce019db18fca3eb092096ddc1786008"} err="failed to get container status \"693dd3e92ce4b0dac7d19a723df987cebce019db18fca3eb092096ddc1786008\": rpc error: code = NotFound desc = could not find container \"693dd3e92ce4b0dac7d19a723df987cebce019db18fca3eb092096ddc1786008\": container with ID starting with 693dd3e92ce4b0dac7d19a723df987cebce019db18fca3eb092096ddc1786008 not found: ID does not exist" Dec 05 13:52:26 crc kubenswrapper[4784]: I1205 13:52:26.547682 4784 scope.go:117] "RemoveContainer" containerID="7e7f11c96dd268136802cd38cb20dd846ea34f5670b21fe90a55cc6439461471" Dec 05 13:52:26 crc kubenswrapper[4784]: E1205 13:52:26.547916 4784 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"7e7f11c96dd268136802cd38cb20dd846ea34f5670b21fe90a55cc6439461471\": container with ID starting with 7e7f11c96dd268136802cd38cb20dd846ea34f5670b21fe90a55cc6439461471 not found: ID does not exist" containerID="7e7f11c96dd268136802cd38cb20dd846ea34f5670b21fe90a55cc6439461471" Dec 05 13:52:26 crc kubenswrapper[4784]: I1205 13:52:26.547943 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e7f11c96dd268136802cd38cb20dd846ea34f5670b21fe90a55cc6439461471"} err="failed to get container status \"7e7f11c96dd268136802cd38cb20dd846ea34f5670b21fe90a55cc6439461471\": rpc error: code = NotFound desc = could not find container \"7e7f11c96dd268136802cd38cb20dd846ea34f5670b21fe90a55cc6439461471\": container with ID starting with 7e7f11c96dd268136802cd38cb20dd846ea34f5670b21fe90a55cc6439461471 not found: ID does not exist" Dec 05 13:52:27 crc kubenswrapper[4784]: I1205 13:52:27.021567 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eec33a98-4519-4b27-a3b0-fd8433e550d0" path="/var/lib/kubelet/pods/eec33a98-4519-4b27-a3b0-fd8433e550d0/volumes" Dec 05 13:52:37 crc kubenswrapper[4784]: I1205 13:52:36.999854 4784 scope.go:117] "RemoveContainer" containerID="f57a741206f67acf68eaef09a87dc2b6478253afd299f7f2034ad24699c49dc6" Dec 05 13:52:37 crc kubenswrapper[4784]: E1205 13:52:37.000656 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:52:48 crc kubenswrapper[4784]: I1205 13:52:48.999172 4784 scope.go:117] "RemoveContainer" containerID="f57a741206f67acf68eaef09a87dc2b6478253afd299f7f2034ad24699c49dc6" Dec 05 13:52:49 crc kubenswrapper[4784]: E1205 13:52:49.000145 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:53:04 crc kubenswrapper[4784]: I1205 13:53:03.999554 4784 scope.go:117] "RemoveContainer" containerID="f57a741206f67acf68eaef09a87dc2b6478253afd299f7f2034ad24699c49dc6" Dec 05 13:53:04 crc kubenswrapper[4784]: E1205 13:53:04.000258 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:53:18 crc kubenswrapper[4784]: I1205 13:53:17.999620 4784 scope.go:117] "RemoveContainer" containerID="f57a741206f67acf68eaef09a87dc2b6478253afd299f7f2034ad24699c49dc6" Dec 05 13:53:18 crc kubenswrapper[4784]: E1205 13:53:18.000685 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:53:29 crc kubenswrapper[4784]: I1205 13:53:28.999564 4784 scope.go:117] "RemoveContainer" containerID="f57a741206f67acf68eaef09a87dc2b6478253afd299f7f2034ad24699c49dc6" Dec 05 13:53:29 crc kubenswrapper[4784]: E1205 13:53:29.002019 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 13:53:37 crc kubenswrapper[4784]: I1205 13:53:37.089165 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-jxtfg"] Dec 05 13:53:37 crc kubenswrapper[4784]: E1205 13:53:37.091856 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eec33a98-4519-4b27-a3b0-fd8433e550d0" containerName="registry-server" Dec 05 13:53:37 crc kubenswrapper[4784]: I1205 13:53:37.091953 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="eec33a98-4519-4b27-a3b0-fd8433e550d0" containerName="registry-server" Dec 05 13:53:37 crc kubenswrapper[4784]: E1205 13:53:37.092039 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eec33a98-4519-4b27-a3b0-fd8433e550d0" containerName="extract-content" Dec 05 13:53:37 crc kubenswrapper[4784]: I1205 13:53:37.092102 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="eec33a98-4519-4b27-a3b0-fd8433e550d0" containerName="extract-content" Dec 05 13:53:37 crc kubenswrapper[4784]: E1205 13:53:37.092225 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eec33a98-4519-4b27-a3b0-fd8433e550d0" containerName="extract-utilities" Dec 05 13:53:37 crc kubenswrapper[4784]: I1205 13:53:37.092303 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="eec33a98-4519-4b27-a3b0-fd8433e550d0" containerName="extract-utilities" Dec 05 13:53:37 crc kubenswrapper[4784]: I1205 13:53:37.092625 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="eec33a98-4519-4b27-a3b0-fd8433e550d0" containerName="registry-server" Dec 05 13:53:37 crc kubenswrapper[4784]: I1205 13:53:37.094341 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jxtfg" Dec 05 13:53:37 crc kubenswrapper[4784]: I1205 13:53:37.109973 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jxtfg"] Dec 05 13:53:37 crc kubenswrapper[4784]: I1205 13:53:37.149388 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8f61aa20-1831-4616-b43d-b5ea80ef5626-catalog-content\") pod \"redhat-marketplace-jxtfg\" (UID: \"8f61aa20-1831-4616-b43d-b5ea80ef5626\") " pod="openshift-marketplace/redhat-marketplace-jxtfg" Dec 05 13:53:37 crc kubenswrapper[4784]: I1205 13:53:37.149639 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktdv9\" (UniqueName: \"kubernetes.io/projected/8f61aa20-1831-4616-b43d-b5ea80ef5626-kube-api-access-ktdv9\") pod \"redhat-marketplace-jxtfg\" (UID: \"8f61aa20-1831-4616-b43d-b5ea80ef5626\") " pod="openshift-marketplace/redhat-marketplace-jxtfg" Dec 05 13:53:37 crc kubenswrapper[4784]: I1205 13:53:37.149985 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8f61aa20-1831-4616-b43d-b5ea80ef5626-utilities\") pod \"redhat-marketplace-jxtfg\" (UID: \"8f61aa20-1831-4616-b43d-b5ea80ef5626\") " pod="openshift-marketplace/redhat-marketplace-jxtfg" Dec 05 13:53:37 crc kubenswrapper[4784]: I1205 13:53:37.252220 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8f61aa20-1831-4616-b43d-b5ea80ef5626-catalog-content\") pod \"redhat-marketplace-jxtfg\" (UID: \"8f61aa20-1831-4616-b43d-b5ea80ef5626\") " pod="openshift-marketplace/redhat-marketplace-jxtfg" Dec 05 13:53:37 crc kubenswrapper[4784]: I1205 13:53:37.252290 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktdv9\" (UniqueName: \"kubernetes.io/projected/8f61aa20-1831-4616-b43d-b5ea80ef5626-kube-api-access-ktdv9\") pod \"redhat-marketplace-jxtfg\" (UID: \"8f61aa20-1831-4616-b43d-b5ea80ef5626\") " pod="openshift-marketplace/redhat-marketplace-jxtfg" Dec 05 13:53:37 crc kubenswrapper[4784]: I1205 13:53:37.252358 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8f61aa20-1831-4616-b43d-b5ea80ef5626-utilities\") pod \"redhat-marketplace-jxtfg\" (UID: \"8f61aa20-1831-4616-b43d-b5ea80ef5626\") " pod="openshift-marketplace/redhat-marketplace-jxtfg" Dec 05 13:53:37 crc kubenswrapper[4784]: I1205 13:53:37.252759 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8f61aa20-1831-4616-b43d-b5ea80ef5626-catalog-content\") pod \"redhat-marketplace-jxtfg\" (UID: \"8f61aa20-1831-4616-b43d-b5ea80ef5626\") " pod="openshift-marketplace/redhat-marketplace-jxtfg" Dec 05 13:53:37 crc kubenswrapper[4784]: I1205 13:53:37.252791 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8f61aa20-1831-4616-b43d-b5ea80ef5626-utilities\") pod \"redhat-marketplace-jxtfg\" (UID: \"8f61aa20-1831-4616-b43d-b5ea80ef5626\") " pod="openshift-marketplace/redhat-marketplace-jxtfg" Dec 05 13:53:37 crc kubenswrapper[4784]: I1205 13:53:37.285914 4784 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-ktdv9\" (UniqueName: \"kubernetes.io/projected/8f61aa20-1831-4616-b43d-b5ea80ef5626-kube-api-access-ktdv9\") pod \"redhat-marketplace-jxtfg\" (UID: \"8f61aa20-1831-4616-b43d-b5ea80ef5626\") " pod="openshift-marketplace/redhat-marketplace-jxtfg" Dec 05 13:53:37 crc kubenswrapper[4784]: I1205 13:53:37.422004 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jxtfg" Dec 05 13:53:37 crc kubenswrapper[4784]: I1205 13:53:37.947345 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jxtfg"] Dec 05 13:53:38 crc kubenswrapper[4784]: I1205 13:53:38.251537 4784 generic.go:334] "Generic (PLEG): container finished" podID="8f61aa20-1831-4616-b43d-b5ea80ef5626" containerID="db709abbba74194ed96d18be1803e26f3c1e57bd0568681ee231a735541bfcbb" exitCode=0 Dec 05 13:53:38 crc kubenswrapper[4784]: I1205 13:53:38.251590 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jxtfg" event={"ID":"8f61aa20-1831-4616-b43d-b5ea80ef5626","Type":"ContainerDied","Data":"db709abbba74194ed96d18be1803e26f3c1e57bd0568681ee231a735541bfcbb"} Dec 05 13:53:38 crc kubenswrapper[4784]: I1205 13:53:38.251620 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jxtfg" event={"ID":"8f61aa20-1831-4616-b43d-b5ea80ef5626","Type":"ContainerStarted","Data":"1a0660a3d198b60789f6b1ee9ccc3c3b782f6b24ac3d18357324abcbac206d71"} Dec 05 13:53:39 crc kubenswrapper[4784]: I1205 13:53:39.268641 4784 generic.go:334] "Generic (PLEG): container finished" podID="8f61aa20-1831-4616-b43d-b5ea80ef5626" containerID="c8fed551d05a2d333c87c27e1691ca0616c14be3196747699abf30474e15ebf8" exitCode=0 Dec 05 13:53:39 crc kubenswrapper[4784]: I1205 13:53:39.268746 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jxtfg" event={"ID":"8f61aa20-1831-4616-b43d-b5ea80ef5626","Type":"ContainerDied","Data":"c8fed551d05a2d333c87c27e1691ca0616c14be3196747699abf30474e15ebf8"} Dec 05 13:53:40 crc kubenswrapper[4784]: I1205 13:53:40.280575 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jxtfg" event={"ID":"8f61aa20-1831-4616-b43d-b5ea80ef5626","Type":"ContainerStarted","Data":"9b1cec37a78f5597b73a89f45ee06dc0df23b5b64dffcdb9a7284c41aa1c3194"} Dec 05 13:53:40 crc kubenswrapper[4784]: I1205 13:53:40.304526 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-jxtfg" podStartSLOduration=1.822368746 podStartE2EDuration="3.304493132s" podCreationTimestamp="2025-12-05 13:53:37 +0000 UTC" firstStartedPulling="2025-12-05 13:53:38.254051396 +0000 UTC m=+5297.674118221" lastFinishedPulling="2025-12-05 13:53:39.736175792 +0000 UTC m=+5299.156242607" observedRunningTime="2025-12-05 13:53:40.294976775 +0000 UTC m=+5299.715043590" watchObservedRunningTime="2025-12-05 13:53:40.304493132 +0000 UTC m=+5299.724559947" Dec 05 13:53:41 crc kubenswrapper[4784]: I1205 13:53:41.999538 4784 scope.go:117] "RemoveContainer" containerID="f57a741206f67acf68eaef09a87dc2b6478253afd299f7f2034ad24699c49dc6" Dec 05 13:53:42 crc kubenswrapper[4784]: I1205 13:53:42.308922 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" 
event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerStarted","Data":"e91af80dcdc2708282af7ccc34e9cb3e1773b16581e83e9cf20d8070de5b3a15"} Dec 05 13:53:47 crc kubenswrapper[4784]: I1205 13:53:47.422168 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-jxtfg" Dec 05 13:53:47 crc kubenswrapper[4784]: I1205 13:53:47.422719 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-jxtfg" Dec 05 13:53:47 crc kubenswrapper[4784]: I1205 13:53:47.506078 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-jxtfg" Dec 05 13:53:48 crc kubenswrapper[4784]: I1205 13:53:48.439426 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-jxtfg" Dec 05 13:53:48 crc kubenswrapper[4784]: I1205 13:53:48.520688 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jxtfg"] Dec 05 13:53:50 crc kubenswrapper[4784]: I1205 13:53:50.379491 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-jxtfg" podUID="8f61aa20-1831-4616-b43d-b5ea80ef5626" containerName="registry-server" containerID="cri-o://9b1cec37a78f5597b73a89f45ee06dc0df23b5b64dffcdb9a7284c41aa1c3194" gracePeriod=2 Dec 05 13:53:50 crc kubenswrapper[4784]: I1205 13:53:50.866262 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jxtfg" Dec 05 13:53:50 crc kubenswrapper[4784]: I1205 13:53:50.962890 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8f61aa20-1831-4616-b43d-b5ea80ef5626-catalog-content\") pod \"8f61aa20-1831-4616-b43d-b5ea80ef5626\" (UID: \"8f61aa20-1831-4616-b43d-b5ea80ef5626\") " Dec 05 13:53:50 crc kubenswrapper[4784]: I1205 13:53:50.962948 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8f61aa20-1831-4616-b43d-b5ea80ef5626-utilities\") pod \"8f61aa20-1831-4616-b43d-b5ea80ef5626\" (UID: \"8f61aa20-1831-4616-b43d-b5ea80ef5626\") " Dec 05 13:53:50 crc kubenswrapper[4784]: I1205 13:53:50.963107 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ktdv9\" (UniqueName: \"kubernetes.io/projected/8f61aa20-1831-4616-b43d-b5ea80ef5626-kube-api-access-ktdv9\") pod \"8f61aa20-1831-4616-b43d-b5ea80ef5626\" (UID: \"8f61aa20-1831-4616-b43d-b5ea80ef5626\") " Dec 05 13:53:50 crc kubenswrapper[4784]: I1205 13:53:50.963971 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f61aa20-1831-4616-b43d-b5ea80ef5626-utilities" (OuterVolumeSpecName: "utilities") pod "8f61aa20-1831-4616-b43d-b5ea80ef5626" (UID: "8f61aa20-1831-4616-b43d-b5ea80ef5626"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:53:50 crc kubenswrapper[4784]: I1205 13:53:50.970444 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f61aa20-1831-4616-b43d-b5ea80ef5626-kube-api-access-ktdv9" (OuterVolumeSpecName: "kube-api-access-ktdv9") pod "8f61aa20-1831-4616-b43d-b5ea80ef5626" (UID: "8f61aa20-1831-4616-b43d-b5ea80ef5626"). InnerVolumeSpecName "kube-api-access-ktdv9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:53:50 crc kubenswrapper[4784]: I1205 13:53:50.986060 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f61aa20-1831-4616-b43d-b5ea80ef5626-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8f61aa20-1831-4616-b43d-b5ea80ef5626" (UID: "8f61aa20-1831-4616-b43d-b5ea80ef5626"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:53:51 crc kubenswrapper[4784]: I1205 13:53:51.065869 4784 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8f61aa20-1831-4616-b43d-b5ea80ef5626-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 13:53:51 crc kubenswrapper[4784]: I1205 13:53:51.065907 4784 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8f61aa20-1831-4616-b43d-b5ea80ef5626-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 13:53:51 crc kubenswrapper[4784]: I1205 13:53:51.065919 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ktdv9\" (UniqueName: \"kubernetes.io/projected/8f61aa20-1831-4616-b43d-b5ea80ef5626-kube-api-access-ktdv9\") on node \"crc\" DevicePath \"\"" Dec 05 13:53:51 crc kubenswrapper[4784]: I1205 13:53:51.391517 4784 generic.go:334] "Generic (PLEG): container finished" podID="8f61aa20-1831-4616-b43d-b5ea80ef5626" containerID="9b1cec37a78f5597b73a89f45ee06dc0df23b5b64dffcdb9a7284c41aa1c3194" exitCode=0 Dec 05 13:53:51 crc kubenswrapper[4784]: I1205 13:53:51.391599 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jxtfg" Dec 05 13:53:51 crc kubenswrapper[4784]: I1205 13:53:51.391601 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jxtfg" event={"ID":"8f61aa20-1831-4616-b43d-b5ea80ef5626","Type":"ContainerDied","Data":"9b1cec37a78f5597b73a89f45ee06dc0df23b5b64dffcdb9a7284c41aa1c3194"} Dec 05 13:53:51 crc kubenswrapper[4784]: I1205 13:53:51.391685 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jxtfg" event={"ID":"8f61aa20-1831-4616-b43d-b5ea80ef5626","Type":"ContainerDied","Data":"1a0660a3d198b60789f6b1ee9ccc3c3b782f6b24ac3d18357324abcbac206d71"} Dec 05 13:53:51 crc kubenswrapper[4784]: I1205 13:53:51.391706 4784 scope.go:117] "RemoveContainer" containerID="9b1cec37a78f5597b73a89f45ee06dc0df23b5b64dffcdb9a7284c41aa1c3194" Dec 05 13:53:51 crc kubenswrapper[4784]: I1205 13:53:51.425805 4784 scope.go:117] "RemoveContainer" containerID="c8fed551d05a2d333c87c27e1691ca0616c14be3196747699abf30474e15ebf8" Dec 05 13:53:51 crc kubenswrapper[4784]: I1205 13:53:51.426997 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jxtfg"] Dec 05 13:53:51 crc kubenswrapper[4784]: I1205 13:53:51.440932 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-jxtfg"] Dec 05 13:53:51 crc kubenswrapper[4784]: I1205 13:53:51.461952 4784 scope.go:117] "RemoveContainer" containerID="db709abbba74194ed96d18be1803e26f3c1e57bd0568681ee231a735541bfcbb" Dec 05 13:53:51 crc kubenswrapper[4784]: I1205 13:53:51.520782 4784 scope.go:117] "RemoveContainer" containerID="9b1cec37a78f5597b73a89f45ee06dc0df23b5b64dffcdb9a7284c41aa1c3194" Dec 05 13:53:51 crc kubenswrapper[4784]: E1205 13:53:51.521349 4784 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9b1cec37a78f5597b73a89f45ee06dc0df23b5b64dffcdb9a7284c41aa1c3194\": container with ID starting with 9b1cec37a78f5597b73a89f45ee06dc0df23b5b64dffcdb9a7284c41aa1c3194 not found: ID does not exist" containerID="9b1cec37a78f5597b73a89f45ee06dc0df23b5b64dffcdb9a7284c41aa1c3194" Dec 05 13:53:51 crc kubenswrapper[4784]: I1205 13:53:51.521387 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b1cec37a78f5597b73a89f45ee06dc0df23b5b64dffcdb9a7284c41aa1c3194"} err="failed to get container status \"9b1cec37a78f5597b73a89f45ee06dc0df23b5b64dffcdb9a7284c41aa1c3194\": rpc error: code = NotFound desc = could not find container \"9b1cec37a78f5597b73a89f45ee06dc0df23b5b64dffcdb9a7284c41aa1c3194\": container with ID starting with 9b1cec37a78f5597b73a89f45ee06dc0df23b5b64dffcdb9a7284c41aa1c3194 not found: ID does not exist" Dec 05 13:53:51 crc kubenswrapper[4784]: I1205 13:53:51.521415 4784 scope.go:117] "RemoveContainer" containerID="c8fed551d05a2d333c87c27e1691ca0616c14be3196747699abf30474e15ebf8" Dec 05 13:53:51 crc kubenswrapper[4784]: E1205 13:53:51.521710 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c8fed551d05a2d333c87c27e1691ca0616c14be3196747699abf30474e15ebf8\": container with ID starting with c8fed551d05a2d333c87c27e1691ca0616c14be3196747699abf30474e15ebf8 not found: ID does not exist" containerID="c8fed551d05a2d333c87c27e1691ca0616c14be3196747699abf30474e15ebf8" Dec 05 13:53:51 crc kubenswrapper[4784]: I1205 13:53:51.521738 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8fed551d05a2d333c87c27e1691ca0616c14be3196747699abf30474e15ebf8"} err="failed to get container status \"c8fed551d05a2d333c87c27e1691ca0616c14be3196747699abf30474e15ebf8\": rpc error: code = NotFound desc = could not find container \"c8fed551d05a2d333c87c27e1691ca0616c14be3196747699abf30474e15ebf8\": container with ID starting with c8fed551d05a2d333c87c27e1691ca0616c14be3196747699abf30474e15ebf8 not found: ID does not exist" Dec 05 13:53:51 crc kubenswrapper[4784]: I1205 13:53:51.521756 4784 scope.go:117] "RemoveContainer" containerID="db709abbba74194ed96d18be1803e26f3c1e57bd0568681ee231a735541bfcbb" Dec 05 13:53:51 crc kubenswrapper[4784]: E1205 13:53:51.522179 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db709abbba74194ed96d18be1803e26f3c1e57bd0568681ee231a735541bfcbb\": container with ID starting with db709abbba74194ed96d18be1803e26f3c1e57bd0568681ee231a735541bfcbb not found: ID does not exist" containerID="db709abbba74194ed96d18be1803e26f3c1e57bd0568681ee231a735541bfcbb" Dec 05 13:53:51 crc kubenswrapper[4784]: I1205 13:53:51.522265 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db709abbba74194ed96d18be1803e26f3c1e57bd0568681ee231a735541bfcbb"} err="failed to get container status \"db709abbba74194ed96d18be1803e26f3c1e57bd0568681ee231a735541bfcbb\": rpc error: code = NotFound desc = could not find container \"db709abbba74194ed96d18be1803e26f3c1e57bd0568681ee231a735541bfcbb\": container with ID starting with db709abbba74194ed96d18be1803e26f3c1e57bd0568681ee231a735541bfcbb not found: ID does not exist" Dec 05 13:53:53 crc kubenswrapper[4784]: I1205 13:53:53.010545 4784 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="8f61aa20-1831-4616-b43d-b5ea80ef5626" path="/var/lib/kubelet/pods/8f61aa20-1831-4616-b43d-b5ea80ef5626/volumes" Dec 05 13:55:59 crc kubenswrapper[4784]: I1205 13:55:59.572856 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 13:55:59 crc kubenswrapper[4784]: I1205 13:55:59.573569 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 13:56:29 crc kubenswrapper[4784]: I1205 13:56:29.572301 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 13:56:29 crc kubenswrapper[4784]: I1205 13:56:29.572745 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 13:56:59 crc kubenswrapper[4784]: I1205 13:56:59.572829 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 13:56:59 crc kubenswrapper[4784]: I1205 13:56:59.573515 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 13:56:59 crc kubenswrapper[4784]: I1205 13:56:59.573586 4784 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" Dec 05 13:56:59 crc kubenswrapper[4784]: I1205 13:56:59.574753 4784 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e91af80dcdc2708282af7ccc34e9cb3e1773b16581e83e9cf20d8070de5b3a15"} pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 13:56:59 crc kubenswrapper[4784]: I1205 13:56:59.574834 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" containerID="cri-o://e91af80dcdc2708282af7ccc34e9cb3e1773b16581e83e9cf20d8070de5b3a15" gracePeriod=600 Dec 05 13:57:00 crc kubenswrapper[4784]: I1205 13:57:00.426671 4784 generic.go:334] "Generic (PLEG): container finished" 
podID="be412f31-7a36-4811-8914-be8cdc987d08" containerID="e91af80dcdc2708282af7ccc34e9cb3e1773b16581e83e9cf20d8070de5b3a15" exitCode=0 Dec 05 13:57:00 crc kubenswrapper[4784]: I1205 13:57:00.426742 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerDied","Data":"e91af80dcdc2708282af7ccc34e9cb3e1773b16581e83e9cf20d8070de5b3a15"} Dec 05 13:57:00 crc kubenswrapper[4784]: I1205 13:57:00.426974 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerStarted","Data":"142a049a6beda69ace9ae45109b3c32442d01e6b899428e3aae02b4842203f44"} Dec 05 13:57:00 crc kubenswrapper[4784]: I1205 13:57:00.426994 4784 scope.go:117] "RemoveContainer" containerID="f57a741206f67acf68eaef09a87dc2b6478253afd299f7f2034ad24699c49dc6" Dec 05 13:58:08 crc kubenswrapper[4784]: I1205 13:58:08.496799 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-nhxfd"] Dec 05 13:58:08 crc kubenswrapper[4784]: E1205 13:58:08.497777 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f61aa20-1831-4616-b43d-b5ea80ef5626" containerName="extract-utilities" Dec 05 13:58:08 crc kubenswrapper[4784]: I1205 13:58:08.497793 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f61aa20-1831-4616-b43d-b5ea80ef5626" containerName="extract-utilities" Dec 05 13:58:08 crc kubenswrapper[4784]: E1205 13:58:08.497812 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f61aa20-1831-4616-b43d-b5ea80ef5626" containerName="registry-server" Dec 05 13:58:08 crc kubenswrapper[4784]: I1205 13:58:08.497821 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f61aa20-1831-4616-b43d-b5ea80ef5626" containerName="registry-server" Dec 05 13:58:08 crc kubenswrapper[4784]: E1205 13:58:08.497837 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f61aa20-1831-4616-b43d-b5ea80ef5626" containerName="extract-content" Dec 05 13:58:08 crc kubenswrapper[4784]: I1205 13:58:08.497846 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f61aa20-1831-4616-b43d-b5ea80ef5626" containerName="extract-content" Dec 05 13:58:08 crc kubenswrapper[4784]: I1205 13:58:08.498107 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f61aa20-1831-4616-b43d-b5ea80ef5626" containerName="registry-server" Dec 05 13:58:08 crc kubenswrapper[4784]: I1205 13:58:08.500063 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-nhxfd" Dec 05 13:58:08 crc kubenswrapper[4784]: I1205 13:58:08.519806 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nhxfd"] Dec 05 13:58:08 crc kubenswrapper[4784]: I1205 13:58:08.587304 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmrnv\" (UniqueName: \"kubernetes.io/projected/0e48e468-dbe2-4579-a63c-49fad85098ce-kube-api-access-xmrnv\") pod \"redhat-operators-nhxfd\" (UID: \"0e48e468-dbe2-4579-a63c-49fad85098ce\") " pod="openshift-marketplace/redhat-operators-nhxfd" Dec 05 13:58:08 crc kubenswrapper[4784]: I1205 13:58:08.587699 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e48e468-dbe2-4579-a63c-49fad85098ce-catalog-content\") pod \"redhat-operators-nhxfd\" (UID: \"0e48e468-dbe2-4579-a63c-49fad85098ce\") " pod="openshift-marketplace/redhat-operators-nhxfd" Dec 05 13:58:08 crc kubenswrapper[4784]: I1205 13:58:08.587869 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e48e468-dbe2-4579-a63c-49fad85098ce-utilities\") pod \"redhat-operators-nhxfd\" (UID: \"0e48e468-dbe2-4579-a63c-49fad85098ce\") " pod="openshift-marketplace/redhat-operators-nhxfd" Dec 05 13:58:08 crc kubenswrapper[4784]: I1205 13:58:08.689702 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e48e468-dbe2-4579-a63c-49fad85098ce-utilities\") pod \"redhat-operators-nhxfd\" (UID: \"0e48e468-dbe2-4579-a63c-49fad85098ce\") " pod="openshift-marketplace/redhat-operators-nhxfd" Dec 05 13:58:08 crc kubenswrapper[4784]: I1205 13:58:08.689831 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xmrnv\" (UniqueName: \"kubernetes.io/projected/0e48e468-dbe2-4579-a63c-49fad85098ce-kube-api-access-xmrnv\") pod \"redhat-operators-nhxfd\" (UID: \"0e48e468-dbe2-4579-a63c-49fad85098ce\") " pod="openshift-marketplace/redhat-operators-nhxfd" Dec 05 13:58:08 crc kubenswrapper[4784]: I1205 13:58:08.689982 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e48e468-dbe2-4579-a63c-49fad85098ce-catalog-content\") pod \"redhat-operators-nhxfd\" (UID: \"0e48e468-dbe2-4579-a63c-49fad85098ce\") " pod="openshift-marketplace/redhat-operators-nhxfd" Dec 05 13:58:08 crc kubenswrapper[4784]: I1205 13:58:08.690327 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e48e468-dbe2-4579-a63c-49fad85098ce-utilities\") pod \"redhat-operators-nhxfd\" (UID: \"0e48e468-dbe2-4579-a63c-49fad85098ce\") " pod="openshift-marketplace/redhat-operators-nhxfd" Dec 05 13:58:08 crc kubenswrapper[4784]: I1205 13:58:08.690558 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e48e468-dbe2-4579-a63c-49fad85098ce-catalog-content\") pod \"redhat-operators-nhxfd\" (UID: \"0e48e468-dbe2-4579-a63c-49fad85098ce\") " pod="openshift-marketplace/redhat-operators-nhxfd" Dec 05 13:58:08 crc kubenswrapper[4784]: I1205 13:58:08.718227 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-xmrnv\" (UniqueName: \"kubernetes.io/projected/0e48e468-dbe2-4579-a63c-49fad85098ce-kube-api-access-xmrnv\") pod \"redhat-operators-nhxfd\" (UID: \"0e48e468-dbe2-4579-a63c-49fad85098ce\") " pod="openshift-marketplace/redhat-operators-nhxfd" Dec 05 13:58:08 crc kubenswrapper[4784]: I1205 13:58:08.826214 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nhxfd" Dec 05 13:58:09 crc kubenswrapper[4784]: I1205 13:58:09.360239 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nhxfd"] Dec 05 13:58:10 crc kubenswrapper[4784]: I1205 13:58:10.141627 4784 generic.go:334] "Generic (PLEG): container finished" podID="0e48e468-dbe2-4579-a63c-49fad85098ce" containerID="78ba6efcdb612051ac9dbbadd4e3eaf6532da0276144c5d601d60b76dbf61a5a" exitCode=0 Dec 05 13:58:10 crc kubenswrapper[4784]: I1205 13:58:10.141820 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nhxfd" event={"ID":"0e48e468-dbe2-4579-a63c-49fad85098ce","Type":"ContainerDied","Data":"78ba6efcdb612051ac9dbbadd4e3eaf6532da0276144c5d601d60b76dbf61a5a"} Dec 05 13:58:10 crc kubenswrapper[4784]: I1205 13:58:10.142007 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nhxfd" event={"ID":"0e48e468-dbe2-4579-a63c-49fad85098ce","Type":"ContainerStarted","Data":"99b06093756066c7b58333f6d13b787ef04ba206b58c96ca2815eb4f3acda3a6"} Dec 05 13:58:10 crc kubenswrapper[4784]: I1205 13:58:10.145564 4784 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 13:58:11 crc kubenswrapper[4784]: I1205 13:58:11.153044 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nhxfd" event={"ID":"0e48e468-dbe2-4579-a63c-49fad85098ce","Type":"ContainerStarted","Data":"caa4720548f485d016308141182f730996981f0835879a62b87d6845d626d7b2"} Dec 05 13:58:17 crc kubenswrapper[4784]: I1205 13:58:17.217783 4784 generic.go:334] "Generic (PLEG): container finished" podID="0e48e468-dbe2-4579-a63c-49fad85098ce" containerID="caa4720548f485d016308141182f730996981f0835879a62b87d6845d626d7b2" exitCode=0 Dec 05 13:58:17 crc kubenswrapper[4784]: I1205 13:58:17.217848 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nhxfd" event={"ID":"0e48e468-dbe2-4579-a63c-49fad85098ce","Type":"ContainerDied","Data":"caa4720548f485d016308141182f730996981f0835879a62b87d6845d626d7b2"} Dec 05 13:58:19 crc kubenswrapper[4784]: I1205 13:58:19.238597 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nhxfd" event={"ID":"0e48e468-dbe2-4579-a63c-49fad85098ce","Type":"ContainerStarted","Data":"c544d13e8e19a399c870f5d31eae291a646be66f142d22fd72e7a4c6caa99baa"} Dec 05 13:58:19 crc kubenswrapper[4784]: I1205 13:58:19.270909 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-nhxfd" podStartSLOduration=3.541975975 podStartE2EDuration="11.270892896s" podCreationTimestamp="2025-12-05 13:58:08 +0000 UTC" firstStartedPulling="2025-12-05 13:58:10.14494807 +0000 UTC m=+5569.565014905" lastFinishedPulling="2025-12-05 13:58:17.873864971 +0000 UTC m=+5577.293931826" observedRunningTime="2025-12-05 13:58:19.266997866 +0000 UTC m=+5578.687064681" watchObservedRunningTime="2025-12-05 13:58:19.270892896 +0000 UTC m=+5578.690959711" Dec 05 13:58:28 crc 
kubenswrapper[4784]: I1205 13:58:28.826902 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-nhxfd" Dec 05 13:58:28 crc kubenswrapper[4784]: I1205 13:58:28.828806 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-nhxfd" Dec 05 13:58:28 crc kubenswrapper[4784]: I1205 13:58:28.889880 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-nhxfd" Dec 05 13:58:29 crc kubenswrapper[4784]: I1205 13:58:29.405575 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-nhxfd" Dec 05 13:58:29 crc kubenswrapper[4784]: I1205 13:58:29.836572 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-tgkv7"] Dec 05 13:58:29 crc kubenswrapper[4784]: I1205 13:58:29.839078 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tgkv7" Dec 05 13:58:29 crc kubenswrapper[4784]: I1205 13:58:29.860578 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tgkv7"] Dec 05 13:58:29 crc kubenswrapper[4784]: I1205 13:58:29.870291 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9aebe13e-a7c2-45e4-86cb-1fec19ef764a-catalog-content\") pod \"community-operators-tgkv7\" (UID: \"9aebe13e-a7c2-45e4-86cb-1fec19ef764a\") " pod="openshift-marketplace/community-operators-tgkv7" Dec 05 13:58:29 crc kubenswrapper[4784]: I1205 13:58:29.870385 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d424t\" (UniqueName: \"kubernetes.io/projected/9aebe13e-a7c2-45e4-86cb-1fec19ef764a-kube-api-access-d424t\") pod \"community-operators-tgkv7\" (UID: \"9aebe13e-a7c2-45e4-86cb-1fec19ef764a\") " pod="openshift-marketplace/community-operators-tgkv7" Dec 05 13:58:29 crc kubenswrapper[4784]: I1205 13:58:29.870846 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9aebe13e-a7c2-45e4-86cb-1fec19ef764a-utilities\") pod \"community-operators-tgkv7\" (UID: \"9aebe13e-a7c2-45e4-86cb-1fec19ef764a\") " pod="openshift-marketplace/community-operators-tgkv7" Dec 05 13:58:29 crc kubenswrapper[4784]: I1205 13:58:29.972865 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9aebe13e-a7c2-45e4-86cb-1fec19ef764a-utilities\") pod \"community-operators-tgkv7\" (UID: \"9aebe13e-a7c2-45e4-86cb-1fec19ef764a\") " pod="openshift-marketplace/community-operators-tgkv7" Dec 05 13:58:29 crc kubenswrapper[4784]: I1205 13:58:29.973019 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9aebe13e-a7c2-45e4-86cb-1fec19ef764a-catalog-content\") pod \"community-operators-tgkv7\" (UID: \"9aebe13e-a7c2-45e4-86cb-1fec19ef764a\") " pod="openshift-marketplace/community-operators-tgkv7" Dec 05 13:58:29 crc kubenswrapper[4784]: I1205 13:58:29.973073 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d424t\" (UniqueName: 
\"kubernetes.io/projected/9aebe13e-a7c2-45e4-86cb-1fec19ef764a-kube-api-access-d424t\") pod \"community-operators-tgkv7\" (UID: \"9aebe13e-a7c2-45e4-86cb-1fec19ef764a\") " pod="openshift-marketplace/community-operators-tgkv7" Dec 05 13:58:29 crc kubenswrapper[4784]: I1205 13:58:29.973636 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9aebe13e-a7c2-45e4-86cb-1fec19ef764a-utilities\") pod \"community-operators-tgkv7\" (UID: \"9aebe13e-a7c2-45e4-86cb-1fec19ef764a\") " pod="openshift-marketplace/community-operators-tgkv7" Dec 05 13:58:29 crc kubenswrapper[4784]: I1205 13:58:29.973660 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9aebe13e-a7c2-45e4-86cb-1fec19ef764a-catalog-content\") pod \"community-operators-tgkv7\" (UID: \"9aebe13e-a7c2-45e4-86cb-1fec19ef764a\") " pod="openshift-marketplace/community-operators-tgkv7" Dec 05 13:58:29 crc kubenswrapper[4784]: I1205 13:58:29.999403 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d424t\" (UniqueName: \"kubernetes.io/projected/9aebe13e-a7c2-45e4-86cb-1fec19ef764a-kube-api-access-d424t\") pod \"community-operators-tgkv7\" (UID: \"9aebe13e-a7c2-45e4-86cb-1fec19ef764a\") " pod="openshift-marketplace/community-operators-tgkv7" Dec 05 13:58:30 crc kubenswrapper[4784]: I1205 13:58:30.160562 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tgkv7" Dec 05 13:58:30 crc kubenswrapper[4784]: I1205 13:58:30.720349 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tgkv7"] Dec 05 13:58:31 crc kubenswrapper[4784]: I1205 13:58:31.369470 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tgkv7" event={"ID":"9aebe13e-a7c2-45e4-86cb-1fec19ef764a","Type":"ContainerStarted","Data":"3a2358d45e943d0a0427ab3953705eca9f70b86cb514c22dc47aa7c2548bc728"} Dec 05 13:58:32 crc kubenswrapper[4784]: I1205 13:58:32.384979 4784 generic.go:334] "Generic (PLEG): container finished" podID="9aebe13e-a7c2-45e4-86cb-1fec19ef764a" containerID="faf0b3793097d5fec1d1c92bc9659db75ba22206429c10b6e73a71123bee2388" exitCode=0 Dec 05 13:58:32 crc kubenswrapper[4784]: I1205 13:58:32.385026 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tgkv7" event={"ID":"9aebe13e-a7c2-45e4-86cb-1fec19ef764a","Type":"ContainerDied","Data":"faf0b3793097d5fec1d1c92bc9659db75ba22206429c10b6e73a71123bee2388"} Dec 05 13:58:34 crc kubenswrapper[4784]: I1205 13:58:34.628392 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nhxfd"] Dec 05 13:58:34 crc kubenswrapper[4784]: I1205 13:58:34.628894 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-nhxfd" podUID="0e48e468-dbe2-4579-a63c-49fad85098ce" containerName="registry-server" containerID="cri-o://c544d13e8e19a399c870f5d31eae291a646be66f142d22fd72e7a4c6caa99baa" gracePeriod=2 Dec 05 13:58:35 crc kubenswrapper[4784]: I1205 13:58:35.159758 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-nhxfd" Dec 05 13:58:35 crc kubenswrapper[4784]: I1205 13:58:35.305231 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e48e468-dbe2-4579-a63c-49fad85098ce-catalog-content\") pod \"0e48e468-dbe2-4579-a63c-49fad85098ce\" (UID: \"0e48e468-dbe2-4579-a63c-49fad85098ce\") " Dec 05 13:58:35 crc kubenswrapper[4784]: I1205 13:58:35.305415 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e48e468-dbe2-4579-a63c-49fad85098ce-utilities\") pod \"0e48e468-dbe2-4579-a63c-49fad85098ce\" (UID: \"0e48e468-dbe2-4579-a63c-49fad85098ce\") " Dec 05 13:58:35 crc kubenswrapper[4784]: I1205 13:58:35.305542 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xmrnv\" (UniqueName: \"kubernetes.io/projected/0e48e468-dbe2-4579-a63c-49fad85098ce-kube-api-access-xmrnv\") pod \"0e48e468-dbe2-4579-a63c-49fad85098ce\" (UID: \"0e48e468-dbe2-4579-a63c-49fad85098ce\") " Dec 05 13:58:35 crc kubenswrapper[4784]: I1205 13:58:35.307310 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0e48e468-dbe2-4579-a63c-49fad85098ce-utilities" (OuterVolumeSpecName: "utilities") pod "0e48e468-dbe2-4579-a63c-49fad85098ce" (UID: "0e48e468-dbe2-4579-a63c-49fad85098ce"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:58:35 crc kubenswrapper[4784]: I1205 13:58:35.318441 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e48e468-dbe2-4579-a63c-49fad85098ce-kube-api-access-xmrnv" (OuterVolumeSpecName: "kube-api-access-xmrnv") pod "0e48e468-dbe2-4579-a63c-49fad85098ce" (UID: "0e48e468-dbe2-4579-a63c-49fad85098ce"). InnerVolumeSpecName "kube-api-access-xmrnv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 13:58:35 crc kubenswrapper[4784]: I1205 13:58:35.407376 4784 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e48e468-dbe2-4579-a63c-49fad85098ce-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 13:58:35 crc kubenswrapper[4784]: I1205 13:58:35.407413 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xmrnv\" (UniqueName: \"kubernetes.io/projected/0e48e468-dbe2-4579-a63c-49fad85098ce-kube-api-access-xmrnv\") on node \"crc\" DevicePath \"\"" Dec 05 13:58:35 crc kubenswrapper[4784]: I1205 13:58:35.419670 4784 generic.go:334] "Generic (PLEG): container finished" podID="0e48e468-dbe2-4579-a63c-49fad85098ce" containerID="c544d13e8e19a399c870f5d31eae291a646be66f142d22fd72e7a4c6caa99baa" exitCode=0 Dec 05 13:58:35 crc kubenswrapper[4784]: I1205 13:58:35.419782 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nhxfd" event={"ID":"0e48e468-dbe2-4579-a63c-49fad85098ce","Type":"ContainerDied","Data":"c544d13e8e19a399c870f5d31eae291a646be66f142d22fd72e7a4c6caa99baa"} Dec 05 13:58:35 crc kubenswrapper[4784]: I1205 13:58:35.419908 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nhxfd" event={"ID":"0e48e468-dbe2-4579-a63c-49fad85098ce","Type":"ContainerDied","Data":"99b06093756066c7b58333f6d13b787ef04ba206b58c96ca2815eb4f3acda3a6"} Dec 05 13:58:35 crc kubenswrapper[4784]: I1205 13:58:35.419936 4784 scope.go:117] "RemoveContainer" containerID="c544d13e8e19a399c870f5d31eae291a646be66f142d22fd72e7a4c6caa99baa" Dec 05 13:58:35 crc kubenswrapper[4784]: I1205 13:58:35.420061 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nhxfd" Dec 05 13:58:35 crc kubenswrapper[4784]: I1205 13:58:35.421158 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0e48e468-dbe2-4579-a63c-49fad85098ce-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0e48e468-dbe2-4579-a63c-49fad85098ce" (UID: "0e48e468-dbe2-4579-a63c-49fad85098ce"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:58:35 crc kubenswrapper[4784]: I1205 13:58:35.422976 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tgkv7" event={"ID":"9aebe13e-a7c2-45e4-86cb-1fec19ef764a","Type":"ContainerStarted","Data":"75363b0217c9c20d693456dc474217ee854aa70a94a27af33d39d05e9ab7c66c"} Dec 05 13:58:35 crc kubenswrapper[4784]: I1205 13:58:35.444620 4784 scope.go:117] "RemoveContainer" containerID="caa4720548f485d016308141182f730996981f0835879a62b87d6845d626d7b2" Dec 05 13:58:35 crc kubenswrapper[4784]: I1205 13:58:35.474047 4784 scope.go:117] "RemoveContainer" containerID="78ba6efcdb612051ac9dbbadd4e3eaf6532da0276144c5d601d60b76dbf61a5a" Dec 05 13:58:35 crc kubenswrapper[4784]: I1205 13:58:35.507159 4784 scope.go:117] "RemoveContainer" containerID="c544d13e8e19a399c870f5d31eae291a646be66f142d22fd72e7a4c6caa99baa" Dec 05 13:58:35 crc kubenswrapper[4784]: E1205 13:58:35.507597 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c544d13e8e19a399c870f5d31eae291a646be66f142d22fd72e7a4c6caa99baa\": container with ID starting with c544d13e8e19a399c870f5d31eae291a646be66f142d22fd72e7a4c6caa99baa not found: ID does not exist" containerID="c544d13e8e19a399c870f5d31eae291a646be66f142d22fd72e7a4c6caa99baa" Dec 05 13:58:35 crc kubenswrapper[4784]: I1205 13:58:35.507647 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c544d13e8e19a399c870f5d31eae291a646be66f142d22fd72e7a4c6caa99baa"} err="failed to get container status \"c544d13e8e19a399c870f5d31eae291a646be66f142d22fd72e7a4c6caa99baa\": rpc error: code = NotFound desc = could not find container \"c544d13e8e19a399c870f5d31eae291a646be66f142d22fd72e7a4c6caa99baa\": container with ID starting with c544d13e8e19a399c870f5d31eae291a646be66f142d22fd72e7a4c6caa99baa not found: ID does not exist" Dec 05 13:58:35 crc kubenswrapper[4784]: I1205 13:58:35.507683 4784 scope.go:117] "RemoveContainer" containerID="caa4720548f485d016308141182f730996981f0835879a62b87d6845d626d7b2" Dec 05 13:58:35 crc kubenswrapper[4784]: E1205 13:58:35.508041 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"caa4720548f485d016308141182f730996981f0835879a62b87d6845d626d7b2\": container with ID starting with caa4720548f485d016308141182f730996981f0835879a62b87d6845d626d7b2 not found: ID does not exist" containerID="caa4720548f485d016308141182f730996981f0835879a62b87d6845d626d7b2" Dec 05 13:58:35 crc kubenswrapper[4784]: I1205 13:58:35.508096 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"caa4720548f485d016308141182f730996981f0835879a62b87d6845d626d7b2"} err="failed to get container status \"caa4720548f485d016308141182f730996981f0835879a62b87d6845d626d7b2\": rpc error: code = NotFound desc = could not find container \"caa4720548f485d016308141182f730996981f0835879a62b87d6845d626d7b2\": container with ID starting with caa4720548f485d016308141182f730996981f0835879a62b87d6845d626d7b2 not found: ID does not exist" Dec 05 13:58:35 crc kubenswrapper[4784]: I1205 13:58:35.508129 4784 scope.go:117] "RemoveContainer" containerID="78ba6efcdb612051ac9dbbadd4e3eaf6532da0276144c5d601d60b76dbf61a5a" Dec 05 13:58:35 crc kubenswrapper[4784]: E1205 13:58:35.508689 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = 
Dec 05 13:58:35 crc kubenswrapper[4784]: I1205 13:58:35.508726 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78ba6efcdb612051ac9dbbadd4e3eaf6532da0276144c5d601d60b76dbf61a5a"} err="failed to get container status \"78ba6efcdb612051ac9dbbadd4e3eaf6532da0276144c5d601d60b76dbf61a5a\": rpc error: code = NotFound desc = could not find container \"78ba6efcdb612051ac9dbbadd4e3eaf6532da0276144c5d601d60b76dbf61a5a\": container with ID starting with 78ba6efcdb612051ac9dbbadd4e3eaf6532da0276144c5d601d60b76dbf61a5a not found: ID does not exist"
Dec 05 13:58:35 crc kubenswrapper[4784]: I1205 13:58:35.508945 4784 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e48e468-dbe2-4579-a63c-49fad85098ce-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 13:58:35 crc kubenswrapper[4784]: I1205 13:58:35.754037 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nhxfd"]
Dec 05 13:58:35 crc kubenswrapper[4784]: I1205 13:58:35.761870 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-nhxfd"]
Dec 05 13:58:37 crc kubenswrapper[4784]: I1205 13:58:37.016918 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0e48e468-dbe2-4579-a63c-49fad85098ce" path="/var/lib/kubelet/pods/0e48e468-dbe2-4579-a63c-49fad85098ce/volumes"
Dec 05 13:58:38 crc kubenswrapper[4784]: I1205 13:58:38.455734 4784 generic.go:334] "Generic (PLEG): container finished" podID="9aebe13e-a7c2-45e4-86cb-1fec19ef764a" containerID="75363b0217c9c20d693456dc474217ee854aa70a94a27af33d39d05e9ab7c66c" exitCode=0
Dec 05 13:58:38 crc kubenswrapper[4784]: I1205 13:58:38.455844 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tgkv7" event={"ID":"9aebe13e-a7c2-45e4-86cb-1fec19ef764a","Type":"ContainerDied","Data":"75363b0217c9c20d693456dc474217ee854aa70a94a27af33d39d05e9ab7c66c"}
Dec 05 13:58:41 crc kubenswrapper[4784]: I1205 13:58:41.488723 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tgkv7" event={"ID":"9aebe13e-a7c2-45e4-86cb-1fec19ef764a","Type":"ContainerStarted","Data":"a42f13302036cff41061294496f57a07fd73c8f972231c1ae22d773709f3475e"}
Dec 05 13:58:42 crc kubenswrapper[4784]: I1205 13:58:42.529780 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-tgkv7" podStartSLOduration=5.390277605 podStartE2EDuration="13.529755084s" podCreationTimestamp="2025-12-05 13:58:29 +0000 UTC" firstStartedPulling="2025-12-05 13:58:32.387410341 +0000 UTC m=+5591.807477156" lastFinishedPulling="2025-12-05 13:58:40.52688782 +0000 UTC m=+5599.946954635" observedRunningTime="2025-12-05 13:58:42.518424452 +0000 UTC m=+5601.938491317" watchObservedRunningTime="2025-12-05 13:58:42.529755084 +0000 UTC m=+5601.949821909"
Dec 05 13:58:50 crc kubenswrapper[4784]: I1205 13:58:50.162250 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-tgkv7"
Dec 05 13:58:50 crc kubenswrapper[4784]: I1205 13:58:50.162723 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-tgkv7"
Dec 05 13:58:50 crc kubenswrapper[4784]: I1205 13:58:50.220963 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-tgkv7"
Dec 05 13:58:50 crc kubenswrapper[4784]: I1205 13:58:50.667412 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-tgkv7"
Dec 05 13:58:53 crc kubenswrapper[4784]: I1205 13:58:53.792969 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tgkv7"]
Dec 05 13:58:53 crc kubenswrapper[4784]: I1205 13:58:53.793728 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-tgkv7" podUID="9aebe13e-a7c2-45e4-86cb-1fec19ef764a" containerName="registry-server" containerID="cri-o://a42f13302036cff41061294496f57a07fd73c8f972231c1ae22d773709f3475e" gracePeriod=2
Dec 05 13:58:54 crc kubenswrapper[4784]: I1205 13:58:54.291683 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tgkv7"
Dec 05 13:58:54 crc kubenswrapper[4784]: I1205 13:58:54.463622 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d424t\" (UniqueName: \"kubernetes.io/projected/9aebe13e-a7c2-45e4-86cb-1fec19ef764a-kube-api-access-d424t\") pod \"9aebe13e-a7c2-45e4-86cb-1fec19ef764a\" (UID: \"9aebe13e-a7c2-45e4-86cb-1fec19ef764a\") "
Dec 05 13:58:54 crc kubenswrapper[4784]: I1205 13:58:54.463947 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9aebe13e-a7c2-45e4-86cb-1fec19ef764a-utilities\") pod \"9aebe13e-a7c2-45e4-86cb-1fec19ef764a\" (UID: \"9aebe13e-a7c2-45e4-86cb-1fec19ef764a\") "
Dec 05 13:58:54 crc kubenswrapper[4784]: I1205 13:58:54.463985 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9aebe13e-a7c2-45e4-86cb-1fec19ef764a-catalog-content\") pod \"9aebe13e-a7c2-45e4-86cb-1fec19ef764a\" (UID: \"9aebe13e-a7c2-45e4-86cb-1fec19ef764a\") "
Dec 05 13:58:54 crc kubenswrapper[4784]: I1205 13:58:54.464763 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9aebe13e-a7c2-45e4-86cb-1fec19ef764a-utilities" (OuterVolumeSpecName: "utilities") pod "9aebe13e-a7c2-45e4-86cb-1fec19ef764a" (UID: "9aebe13e-a7c2-45e4-86cb-1fec19ef764a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 13:58:54 crc kubenswrapper[4784]: I1205 13:58:54.471315 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9aebe13e-a7c2-45e4-86cb-1fec19ef764a-kube-api-access-d424t" (OuterVolumeSpecName: "kube-api-access-d424t") pod "9aebe13e-a7c2-45e4-86cb-1fec19ef764a" (UID: "9aebe13e-a7c2-45e4-86cb-1fec19ef764a"). InnerVolumeSpecName "kube-api-access-d424t". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 13:58:54 crc kubenswrapper[4784]: I1205 13:58:54.526496 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9aebe13e-a7c2-45e4-86cb-1fec19ef764a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9aebe13e-a7c2-45e4-86cb-1fec19ef764a" (UID: "9aebe13e-a7c2-45e4-86cb-1fec19ef764a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 13:58:54 crc kubenswrapper[4784]: I1205 13:58:54.566401 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d424t\" (UniqueName: \"kubernetes.io/projected/9aebe13e-a7c2-45e4-86cb-1fec19ef764a-kube-api-access-d424t\") on node \"crc\" DevicePath \"\"" Dec 05 13:58:54 crc kubenswrapper[4784]: I1205 13:58:54.566670 4784 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9aebe13e-a7c2-45e4-86cb-1fec19ef764a-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 13:58:54 crc kubenswrapper[4784]: I1205 13:58:54.566755 4784 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9aebe13e-a7c2-45e4-86cb-1fec19ef764a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 13:58:54 crc kubenswrapper[4784]: I1205 13:58:54.632997 4784 generic.go:334] "Generic (PLEG): container finished" podID="9aebe13e-a7c2-45e4-86cb-1fec19ef764a" containerID="a42f13302036cff41061294496f57a07fd73c8f972231c1ae22d773709f3475e" exitCode=0 Dec 05 13:58:54 crc kubenswrapper[4784]: I1205 13:58:54.633037 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tgkv7" event={"ID":"9aebe13e-a7c2-45e4-86cb-1fec19ef764a","Type":"ContainerDied","Data":"a42f13302036cff41061294496f57a07fd73c8f972231c1ae22d773709f3475e"} Dec 05 13:58:54 crc kubenswrapper[4784]: I1205 13:58:54.633065 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tgkv7" Dec 05 13:58:54 crc kubenswrapper[4784]: I1205 13:58:54.633560 4784 scope.go:117] "RemoveContainer" containerID="a42f13302036cff41061294496f57a07fd73c8f972231c1ae22d773709f3475e" Dec 05 13:58:54 crc kubenswrapper[4784]: I1205 13:58:54.633416 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tgkv7" event={"ID":"9aebe13e-a7c2-45e4-86cb-1fec19ef764a","Type":"ContainerDied","Data":"3a2358d45e943d0a0427ab3953705eca9f70b86cb514c22dc47aa7c2548bc728"} Dec 05 13:58:54 crc kubenswrapper[4784]: I1205 13:58:54.685216 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tgkv7"] Dec 05 13:58:54 crc kubenswrapper[4784]: I1205 13:58:54.685746 4784 scope.go:117] "RemoveContainer" containerID="75363b0217c9c20d693456dc474217ee854aa70a94a27af33d39d05e9ab7c66c" Dec 05 13:58:54 crc kubenswrapper[4784]: I1205 13:58:54.697953 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-tgkv7"] Dec 05 13:58:54 crc kubenswrapper[4784]: I1205 13:58:54.713659 4784 scope.go:117] "RemoveContainer" containerID="faf0b3793097d5fec1d1c92bc9659db75ba22206429c10b6e73a71123bee2388" Dec 05 13:58:54 crc kubenswrapper[4784]: I1205 13:58:54.754673 4784 scope.go:117] "RemoveContainer" containerID="a42f13302036cff41061294496f57a07fd73c8f972231c1ae22d773709f3475e" Dec 05 13:58:54 crc kubenswrapper[4784]: E1205 13:58:54.755015 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a42f13302036cff41061294496f57a07fd73c8f972231c1ae22d773709f3475e\": container with ID starting with a42f13302036cff41061294496f57a07fd73c8f972231c1ae22d773709f3475e not found: ID does not exist" containerID="a42f13302036cff41061294496f57a07fd73c8f972231c1ae22d773709f3475e" Dec 05 13:58:54 crc 
kubenswrapper[4784]: I1205 13:58:54.755048 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a42f13302036cff41061294496f57a07fd73c8f972231c1ae22d773709f3475e"} err="failed to get container status \"a42f13302036cff41061294496f57a07fd73c8f972231c1ae22d773709f3475e\": rpc error: code = NotFound desc = could not find container \"a42f13302036cff41061294496f57a07fd73c8f972231c1ae22d773709f3475e\": container with ID starting with a42f13302036cff41061294496f57a07fd73c8f972231c1ae22d773709f3475e not found: ID does not exist" Dec 05 13:58:54 crc kubenswrapper[4784]: I1205 13:58:54.755071 4784 scope.go:117] "RemoveContainer" containerID="75363b0217c9c20d693456dc474217ee854aa70a94a27af33d39d05e9ab7c66c" Dec 05 13:58:54 crc kubenswrapper[4784]: E1205 13:58:54.755458 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"75363b0217c9c20d693456dc474217ee854aa70a94a27af33d39d05e9ab7c66c\": container with ID starting with 75363b0217c9c20d693456dc474217ee854aa70a94a27af33d39d05e9ab7c66c not found: ID does not exist" containerID="75363b0217c9c20d693456dc474217ee854aa70a94a27af33d39d05e9ab7c66c" Dec 05 13:58:54 crc kubenswrapper[4784]: I1205 13:58:54.755482 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75363b0217c9c20d693456dc474217ee854aa70a94a27af33d39d05e9ab7c66c"} err="failed to get container status \"75363b0217c9c20d693456dc474217ee854aa70a94a27af33d39d05e9ab7c66c\": rpc error: code = NotFound desc = could not find container \"75363b0217c9c20d693456dc474217ee854aa70a94a27af33d39d05e9ab7c66c\": container with ID starting with 75363b0217c9c20d693456dc474217ee854aa70a94a27af33d39d05e9ab7c66c not found: ID does not exist" Dec 05 13:58:54 crc kubenswrapper[4784]: I1205 13:58:54.755497 4784 scope.go:117] "RemoveContainer" containerID="faf0b3793097d5fec1d1c92bc9659db75ba22206429c10b6e73a71123bee2388" Dec 05 13:58:54 crc kubenswrapper[4784]: E1205 13:58:54.755739 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"faf0b3793097d5fec1d1c92bc9659db75ba22206429c10b6e73a71123bee2388\": container with ID starting with faf0b3793097d5fec1d1c92bc9659db75ba22206429c10b6e73a71123bee2388 not found: ID does not exist" containerID="faf0b3793097d5fec1d1c92bc9659db75ba22206429c10b6e73a71123bee2388" Dec 05 13:58:54 crc kubenswrapper[4784]: I1205 13:58:54.755760 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"faf0b3793097d5fec1d1c92bc9659db75ba22206429c10b6e73a71123bee2388"} err="failed to get container status \"faf0b3793097d5fec1d1c92bc9659db75ba22206429c10b6e73a71123bee2388\": rpc error: code = NotFound desc = could not find container \"faf0b3793097d5fec1d1c92bc9659db75ba22206429c10b6e73a71123bee2388\": container with ID starting with faf0b3793097d5fec1d1c92bc9659db75ba22206429c10b6e73a71123bee2388 not found: ID does not exist" Dec 05 13:58:55 crc kubenswrapper[4784]: I1205 13:58:55.011844 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9aebe13e-a7c2-45e4-86cb-1fec19ef764a" path="/var/lib/kubelet/pods/9aebe13e-a7c2-45e4-86cb-1fec19ef764a/volumes" Dec 05 13:58:59 crc kubenswrapper[4784]: I1205 13:58:59.578024 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 13:58:59 crc kubenswrapper[4784]: I1205 13:58:59.578832 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 13:59:29 crc kubenswrapper[4784]: I1205 13:59:29.572920 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 13:59:29 crc kubenswrapper[4784]: I1205 13:59:29.573405 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 13:59:59 crc kubenswrapper[4784]: I1205 13:59:59.572410 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 13:59:59 crc kubenswrapper[4784]: I1205 13:59:59.572991 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 13:59:59 crc kubenswrapper[4784]: I1205 13:59:59.573039 4784 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" Dec 05 13:59:59 crc kubenswrapper[4784]: I1205 13:59:59.573901 4784 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"142a049a6beda69ace9ae45109b3c32442d01e6b899428e3aae02b4842203f44"} pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 13:59:59 crc kubenswrapper[4784]: I1205 13:59:59.573955 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" containerID="cri-o://142a049a6beda69ace9ae45109b3c32442d01e6b899428e3aae02b4842203f44" gracePeriod=600 Dec 05 13:59:59 crc kubenswrapper[4784]: E1205 13:59:59.708312 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 14:00:00 crc 
kubenswrapper[4784]: I1205 14:00:00.164739 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415720-q8sc8"] Dec 05 14:00:00 crc kubenswrapper[4784]: E1205 14:00:00.165563 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e48e468-dbe2-4579-a63c-49fad85098ce" containerName="extract-utilities" Dec 05 14:00:00 crc kubenswrapper[4784]: I1205 14:00:00.165591 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e48e468-dbe2-4579-a63c-49fad85098ce" containerName="extract-utilities" Dec 05 14:00:00 crc kubenswrapper[4784]: E1205 14:00:00.165612 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9aebe13e-a7c2-45e4-86cb-1fec19ef764a" containerName="extract-utilities" Dec 05 14:00:00 crc kubenswrapper[4784]: I1205 14:00:00.165619 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="9aebe13e-a7c2-45e4-86cb-1fec19ef764a" containerName="extract-utilities" Dec 05 14:00:00 crc kubenswrapper[4784]: E1205 14:00:00.165638 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e48e468-dbe2-4579-a63c-49fad85098ce" containerName="registry-server" Dec 05 14:00:00 crc kubenswrapper[4784]: I1205 14:00:00.165648 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e48e468-dbe2-4579-a63c-49fad85098ce" containerName="registry-server" Dec 05 14:00:00 crc kubenswrapper[4784]: E1205 14:00:00.165680 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e48e468-dbe2-4579-a63c-49fad85098ce" containerName="extract-content" Dec 05 14:00:00 crc kubenswrapper[4784]: I1205 14:00:00.165688 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e48e468-dbe2-4579-a63c-49fad85098ce" containerName="extract-content" Dec 05 14:00:00 crc kubenswrapper[4784]: E1205 14:00:00.165704 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9aebe13e-a7c2-45e4-86cb-1fec19ef764a" containerName="registry-server" Dec 05 14:00:00 crc kubenswrapper[4784]: I1205 14:00:00.165710 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="9aebe13e-a7c2-45e4-86cb-1fec19ef764a" containerName="registry-server" Dec 05 14:00:00 crc kubenswrapper[4784]: E1205 14:00:00.165723 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9aebe13e-a7c2-45e4-86cb-1fec19ef764a" containerName="extract-content" Dec 05 14:00:00 crc kubenswrapper[4784]: I1205 14:00:00.165729 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="9aebe13e-a7c2-45e4-86cb-1fec19ef764a" containerName="extract-content" Dec 05 14:00:00 crc kubenswrapper[4784]: I1205 14:00:00.166120 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e48e468-dbe2-4579-a63c-49fad85098ce" containerName="registry-server" Dec 05 14:00:00 crc kubenswrapper[4784]: I1205 14:00:00.166159 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="9aebe13e-a7c2-45e4-86cb-1fec19ef764a" containerName="registry-server" Dec 05 14:00:00 crc kubenswrapper[4784]: I1205 14:00:00.167984 4784 util.go:30] "No sandbox for pod can be found. 
Dec 05 14:00:00 crc kubenswrapper[4784]: I1205 14:00:00.172001 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 05 14:00:00 crc kubenswrapper[4784]: I1205 14:00:00.172077 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 05 14:00:00 crc kubenswrapper[4784]: I1205 14:00:00.199214 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415720-q8sc8"]
Dec 05 14:00:00 crc kubenswrapper[4784]: I1205 14:00:00.232223 4784 generic.go:334] "Generic (PLEG): container finished" podID="be412f31-7a36-4811-8914-be8cdc987d08" containerID="142a049a6beda69ace9ae45109b3c32442d01e6b899428e3aae02b4842203f44" exitCode=0
Dec 05 14:00:00 crc kubenswrapper[4784]: I1205 14:00:00.232271 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerDied","Data":"142a049a6beda69ace9ae45109b3c32442d01e6b899428e3aae02b4842203f44"}
Dec 05 14:00:00 crc kubenswrapper[4784]: I1205 14:00:00.232324 4784 scope.go:117] "RemoveContainer" containerID="e91af80dcdc2708282af7ccc34e9cb3e1773b16581e83e9cf20d8070de5b3a15"
Dec 05 14:00:00 crc kubenswrapper[4784]: I1205 14:00:00.233024 4784 scope.go:117] "RemoveContainer" containerID="142a049a6beda69ace9ae45109b3c32442d01e6b899428e3aae02b4842203f44"
Dec 05 14:00:00 crc kubenswrapper[4784]: E1205 14:00:00.233388 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 14:00:00 crc kubenswrapper[4784]: I1205 14:00:00.332770 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de-config-volume\") pod \"collect-profiles-29415720-q8sc8\" (UID: \"d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415720-q8sc8"
Dec 05 14:00:00 crc kubenswrapper[4784]: I1205 14:00:00.332812 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de-secret-volume\") pod \"collect-profiles-29415720-q8sc8\" (UID: \"d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415720-q8sc8"
Dec 05 14:00:00 crc kubenswrapper[4784]: I1205 14:00:00.332874 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hz82r\" (UniqueName: \"kubernetes.io/projected/d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de-kube-api-access-hz82r\") pod \"collect-profiles-29415720-q8sc8\" (UID: \"d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415720-q8sc8"
Dec 05 14:00:00 crc kubenswrapper[4784]: I1205 14:00:00.434651 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hz82r\" (UniqueName: \"kubernetes.io/projected/d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de-kube-api-access-hz82r\") pod \"collect-profiles-29415720-q8sc8\" (UID: \"d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415720-q8sc8"
Dec 05 14:00:00 crc kubenswrapper[4784]: I1205 14:00:00.434828 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de-config-volume\") pod \"collect-profiles-29415720-q8sc8\" (UID: \"d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415720-q8sc8"
Dec 05 14:00:00 crc kubenswrapper[4784]: I1205 14:00:00.434851 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de-secret-volume\") pod \"collect-profiles-29415720-q8sc8\" (UID: \"d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415720-q8sc8"
Dec 05 14:00:00 crc kubenswrapper[4784]: I1205 14:00:00.436405 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de-config-volume\") pod \"collect-profiles-29415720-q8sc8\" (UID: \"d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415720-q8sc8"
Dec 05 14:00:00 crc kubenswrapper[4784]: I1205 14:00:00.441307 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de-secret-volume\") pod \"collect-profiles-29415720-q8sc8\" (UID: \"d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415720-q8sc8"
Dec 05 14:00:00 crc kubenswrapper[4784]: I1205 14:00:00.450415 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hz82r\" (UniqueName: \"kubernetes.io/projected/d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de-kube-api-access-hz82r\") pod \"collect-profiles-29415720-q8sc8\" (UID: \"d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415720-q8sc8"
Dec 05 14:00:00 crc kubenswrapper[4784]: I1205 14:00:00.496449 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415720-q8sc8"
Dec 05 14:00:00 crc kubenswrapper[4784]: I1205 14:00:00.992672 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415720-q8sc8"]
Dec 05 14:00:01 crc kubenswrapper[4784]: I1205 14:00:01.246847 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415720-q8sc8" event={"ID":"d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de","Type":"ContainerStarted","Data":"120b4f2e8be0be641518e9e90adab3feb71dfd0f3b63e7d52bb4bc19eff5ee97"}
Dec 05 14:00:01 crc kubenswrapper[4784]: I1205 14:00:01.246916 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415720-q8sc8" event={"ID":"d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de","Type":"ContainerStarted","Data":"1cbbcea4f1547a708204e28a3e7d4f50eab13fbf49364028996fc5a2cda7cd27"}
Dec 05 14:00:01 crc kubenswrapper[4784]: I1205 14:00:01.270513 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29415720-q8sc8" podStartSLOduration=1.270493517 podStartE2EDuration="1.270493517s" podCreationTimestamp="2025-12-05 14:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 14:00:01.262279231 +0000 UTC m=+5680.682346046" watchObservedRunningTime="2025-12-05 14:00:01.270493517 +0000 UTC m=+5680.690560332"
Dec 05 14:00:02 crc kubenswrapper[4784]: I1205 14:00:02.263625 4784 generic.go:334] "Generic (PLEG): container finished" podID="d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de" containerID="120b4f2e8be0be641518e9e90adab3feb71dfd0f3b63e7d52bb4bc19eff5ee97" exitCode=0
Dec 05 14:00:02 crc kubenswrapper[4784]: I1205 14:00:02.263723 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415720-q8sc8" event={"ID":"d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de","Type":"ContainerDied","Data":"120b4f2e8be0be641518e9e90adab3feb71dfd0f3b63e7d52bb4bc19eff5ee97"}
Dec 05 14:00:03 crc kubenswrapper[4784]: I1205 14:00:03.634252 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415720-q8sc8"
Dec 05 14:00:03 crc kubenswrapper[4784]: I1205 14:00:03.808638 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de-config-volume\") pod \"d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de\" (UID: \"d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de\") "
Dec 05 14:00:03 crc kubenswrapper[4784]: I1205 14:00:03.808799 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hz82r\" (UniqueName: \"kubernetes.io/projected/d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de-kube-api-access-hz82r\") pod \"d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de\" (UID: \"d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de\") "
Dec 05 14:00:03 crc kubenswrapper[4784]: I1205 14:00:03.808991 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de-secret-volume\") pod \"d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de\" (UID: \"d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de\") "
Dec 05 14:00:03 crc kubenswrapper[4784]: I1205 14:00:03.809500 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de-config-volume" (OuterVolumeSpecName: "config-volume") pod "d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de" (UID: "d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 14:00:03 crc kubenswrapper[4784]: I1205 14:00:03.809874 4784 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de-config-volume\") on node \"crc\" DevicePath \"\""
Dec 05 14:00:03 crc kubenswrapper[4784]: I1205 14:00:03.815457 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de" (UID: "d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 14:00:03 crc kubenswrapper[4784]: I1205 14:00:03.818746 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de-kube-api-access-hz82r" (OuterVolumeSpecName: "kube-api-access-hz82r") pod "d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de" (UID: "d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de"). InnerVolumeSpecName "kube-api-access-hz82r". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:00:03 crc kubenswrapper[4784]: I1205 14:00:03.913166 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hz82r\" (UniqueName: \"kubernetes.io/projected/d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de-kube-api-access-hz82r\") on node \"crc\" DevicePath \"\"" Dec 05 14:00:03 crc kubenswrapper[4784]: I1205 14:00:03.913245 4784 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 14:00:04 crc kubenswrapper[4784]: I1205 14:00:04.285106 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415720-q8sc8" event={"ID":"d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de","Type":"ContainerDied","Data":"1cbbcea4f1547a708204e28a3e7d4f50eab13fbf49364028996fc5a2cda7cd27"} Dec 05 14:00:04 crc kubenswrapper[4784]: I1205 14:00:04.285159 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1cbbcea4f1547a708204e28a3e7d4f50eab13fbf49364028996fc5a2cda7cd27" Dec 05 14:00:04 crc kubenswrapper[4784]: I1205 14:00:04.285517 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415720-q8sc8" Dec 05 14:00:04 crc kubenswrapper[4784]: I1205 14:00:04.349865 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415675-5br52"] Dec 05 14:00:04 crc kubenswrapper[4784]: I1205 14:00:04.361274 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415675-5br52"] Dec 05 14:00:05 crc kubenswrapper[4784]: I1205 14:00:05.015017 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e39ba41-37a6-47e9-8118-f67c0726721a" path="/var/lib/kubelet/pods/7e39ba41-37a6-47e9-8118-f67c0726721a/volumes" Dec 05 14:00:11 crc kubenswrapper[4784]: I1205 14:00:11.998708 4784 scope.go:117] "RemoveContainer" containerID="142a049a6beda69ace9ae45109b3c32442d01e6b899428e3aae02b4842203f44" Dec 05 14:00:12 crc kubenswrapper[4784]: E1205 14:00:11.999731 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 14:00:26 crc kubenswrapper[4784]: I1205 14:00:26.999323 4784 scope.go:117] "RemoveContainer" containerID="142a049a6beda69ace9ae45109b3c32442d01e6b899428e3aae02b4842203f44" Dec 05 14:00:27 crc kubenswrapper[4784]: E1205 14:00:27.000248 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 14:00:37 crc kubenswrapper[4784]: I1205 14:00:37.642673 4784 generic.go:334] "Generic (PLEG): container finished" podID="9dc746ad-99ec-4a42-8c05-3c45ece46906" 
containerID="5280dcdd33903dc7d70c6b29f825f3947fbcb9e708ec9417504fc766659a6481" exitCode=0 Dec 05 14:00:37 crc kubenswrapper[4784]: I1205 14:00:37.642773 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"9dc746ad-99ec-4a42-8c05-3c45ece46906","Type":"ContainerDied","Data":"5280dcdd33903dc7d70c6b29f825f3947fbcb9e708ec9417504fc766659a6481"} Dec 05 14:00:39 crc kubenswrapper[4784]: I1205 14:00:39.091513 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 05 14:00:39 crc kubenswrapper[4784]: I1205 14:00:39.186226 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9dc746ad-99ec-4a42-8c05-3c45ece46906-ssh-key\") pod \"9dc746ad-99ec-4a42-8c05-3c45ece46906\" (UID: \"9dc746ad-99ec-4a42-8c05-3c45ece46906\") " Dec 05 14:00:39 crc kubenswrapper[4784]: I1205 14:00:39.186323 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/9dc746ad-99ec-4a42-8c05-3c45ece46906-openstack-config-secret\") pod \"9dc746ad-99ec-4a42-8c05-3c45ece46906\" (UID: \"9dc746ad-99ec-4a42-8c05-3c45ece46906\") " Dec 05 14:00:39 crc kubenswrapper[4784]: I1205 14:00:39.186418 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/9dc746ad-99ec-4a42-8c05-3c45ece46906-test-operator-ephemeral-temporary\") pod \"9dc746ad-99ec-4a42-8c05-3c45ece46906\" (UID: \"9dc746ad-99ec-4a42-8c05-3c45ece46906\") " Dec 05 14:00:39 crc kubenswrapper[4784]: I1205 14:00:39.186540 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z8fb2\" (UniqueName: \"kubernetes.io/projected/9dc746ad-99ec-4a42-8c05-3c45ece46906-kube-api-access-z8fb2\") pod \"9dc746ad-99ec-4a42-8c05-3c45ece46906\" (UID: \"9dc746ad-99ec-4a42-8c05-3c45ece46906\") " Dec 05 14:00:39 crc kubenswrapper[4784]: I1205 14:00:39.187052 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9dc746ad-99ec-4a42-8c05-3c45ece46906-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "9dc746ad-99ec-4a42-8c05-3c45ece46906" (UID: "9dc746ad-99ec-4a42-8c05-3c45ece46906"). InnerVolumeSpecName "test-operator-ephemeral-temporary". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 14:00:39 crc kubenswrapper[4784]: I1205 14:00:39.187405 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"9dc746ad-99ec-4a42-8c05-3c45ece46906\" (UID: \"9dc746ad-99ec-4a42-8c05-3c45ece46906\") " Dec 05 14:00:39 crc kubenswrapper[4784]: I1205 14:00:39.187438 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9dc746ad-99ec-4a42-8c05-3c45ece46906-config-data\") pod \"9dc746ad-99ec-4a42-8c05-3c45ece46906\" (UID: \"9dc746ad-99ec-4a42-8c05-3c45ece46906\") " Dec 05 14:00:39 crc kubenswrapper[4784]: I1205 14:00:39.187508 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/9dc746ad-99ec-4a42-8c05-3c45ece46906-test-operator-ephemeral-workdir\") pod \"9dc746ad-99ec-4a42-8c05-3c45ece46906\" (UID: \"9dc746ad-99ec-4a42-8c05-3c45ece46906\") " Dec 05 14:00:39 crc kubenswrapper[4784]: I1205 14:00:39.187572 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/9dc746ad-99ec-4a42-8c05-3c45ece46906-ca-certs\") pod \"9dc746ad-99ec-4a42-8c05-3c45ece46906\" (UID: \"9dc746ad-99ec-4a42-8c05-3c45ece46906\") " Dec 05 14:00:39 crc kubenswrapper[4784]: I1205 14:00:39.187593 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/9dc746ad-99ec-4a42-8c05-3c45ece46906-openstack-config\") pod \"9dc746ad-99ec-4a42-8c05-3c45ece46906\" (UID: \"9dc746ad-99ec-4a42-8c05-3c45ece46906\") " Dec 05 14:00:39 crc kubenswrapper[4784]: I1205 14:00:39.188806 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9dc746ad-99ec-4a42-8c05-3c45ece46906-config-data" (OuterVolumeSpecName: "config-data") pod "9dc746ad-99ec-4a42-8c05-3c45ece46906" (UID: "9dc746ad-99ec-4a42-8c05-3c45ece46906"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:00:39 crc kubenswrapper[4784]: I1205 14:00:39.188909 4784 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/9dc746ad-99ec-4a42-8c05-3c45ece46906-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Dec 05 14:00:39 crc kubenswrapper[4784]: I1205 14:00:39.195431 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9dc746ad-99ec-4a42-8c05-3c45ece46906-kube-api-access-z8fb2" (OuterVolumeSpecName: "kube-api-access-z8fb2") pod "9dc746ad-99ec-4a42-8c05-3c45ece46906" (UID: "9dc746ad-99ec-4a42-8c05-3c45ece46906"). InnerVolumeSpecName "kube-api-access-z8fb2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:00:39 crc kubenswrapper[4784]: I1205 14:00:39.196537 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9dc746ad-99ec-4a42-8c05-3c45ece46906-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "9dc746ad-99ec-4a42-8c05-3c45ece46906" (UID: "9dc746ad-99ec-4a42-8c05-3c45ece46906"). InnerVolumeSpecName "test-operator-ephemeral-workdir". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 14:00:39 crc kubenswrapper[4784]: I1205 14:00:39.213217 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "test-operator-logs") pod "9dc746ad-99ec-4a42-8c05-3c45ece46906" (UID: "9dc746ad-99ec-4a42-8c05-3c45ece46906"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 14:00:39 crc kubenswrapper[4784]: I1205 14:00:39.222890 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9dc746ad-99ec-4a42-8c05-3c45ece46906-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "9dc746ad-99ec-4a42-8c05-3c45ece46906" (UID: "9dc746ad-99ec-4a42-8c05-3c45ece46906"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:00:39 crc kubenswrapper[4784]: I1205 14:00:39.224197 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9dc746ad-99ec-4a42-8c05-3c45ece46906-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "9dc746ad-99ec-4a42-8c05-3c45ece46906" (UID: "9dc746ad-99ec-4a42-8c05-3c45ece46906"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:00:39 crc kubenswrapper[4784]: I1205 14:00:39.231412 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9dc746ad-99ec-4a42-8c05-3c45ece46906-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "9dc746ad-99ec-4a42-8c05-3c45ece46906" (UID: "9dc746ad-99ec-4a42-8c05-3c45ece46906"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:00:39 crc kubenswrapper[4784]: I1205 14:00:39.257638 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9dc746ad-99ec-4a42-8c05-3c45ece46906-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "9dc746ad-99ec-4a42-8c05-3c45ece46906" (UID: "9dc746ad-99ec-4a42-8c05-3c45ece46906"). InnerVolumeSpecName "openstack-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:00:39 crc kubenswrapper[4784]: I1205 14:00:39.290881 4784 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/9dc746ad-99ec-4a42-8c05-3c45ece46906-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 05 14:00:39 crc kubenswrapper[4784]: I1205 14:00:39.290947 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z8fb2\" (UniqueName: \"kubernetes.io/projected/9dc746ad-99ec-4a42-8c05-3c45ece46906-kube-api-access-z8fb2\") on node \"crc\" DevicePath \"\"" Dec 05 14:00:39 crc kubenswrapper[4784]: I1205 14:00:39.290992 4784 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Dec 05 14:00:39 crc kubenswrapper[4784]: I1205 14:00:39.291007 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9dc746ad-99ec-4a42-8c05-3c45ece46906-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 14:00:39 crc kubenswrapper[4784]: I1205 14:00:39.291021 4784 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/9dc746ad-99ec-4a42-8c05-3c45ece46906-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Dec 05 14:00:39 crc kubenswrapper[4784]: I1205 14:00:39.291036 4784 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/9dc746ad-99ec-4a42-8c05-3c45ece46906-ca-certs\") on node \"crc\" DevicePath \"\"" Dec 05 14:00:39 crc kubenswrapper[4784]: I1205 14:00:39.291048 4784 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/9dc746ad-99ec-4a42-8c05-3c45ece46906-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 05 14:00:39 crc kubenswrapper[4784]: I1205 14:00:39.291059 4784 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9dc746ad-99ec-4a42-8c05-3c45ece46906-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 14:00:39 crc kubenswrapper[4784]: I1205 14:00:39.336650 4784 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Dec 05 14:00:39 crc kubenswrapper[4784]: I1205 14:00:39.392561 4784 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Dec 05 14:00:39 crc kubenswrapper[4784]: I1205 14:00:39.665168 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"9dc746ad-99ec-4a42-8c05-3c45ece46906","Type":"ContainerDied","Data":"5158ffe5a4ff0a32ac0f53d87d83e1eeea879357b9bbd2d8df9a0602dfe78ddf"} Dec 05 14:00:39 crc kubenswrapper[4784]: I1205 14:00:39.665220 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5158ffe5a4ff0a32ac0f53d87d83e1eeea879357b9bbd2d8df9a0602dfe78ddf" Dec 05 14:00:39 crc kubenswrapper[4784]: I1205 14:00:39.665259 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 05 14:00:40 crc kubenswrapper[4784]: I1205 14:00:40.736068 4784 scope.go:117] "RemoveContainer" containerID="fd6e7cb1b58a61b3b46f83f259fd4bcde1992225d511ced23e89860170adfc48" Dec 05 14:00:41 crc kubenswrapper[4784]: I1205 14:00:41.999318 4784 scope.go:117] "RemoveContainer" containerID="142a049a6beda69ace9ae45109b3c32442d01e6b899428e3aae02b4842203f44" Dec 05 14:00:42 crc kubenswrapper[4784]: E1205 14:00:41.999808 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 14:00:42 crc kubenswrapper[4784]: I1205 14:00:42.866811 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 05 14:00:42 crc kubenswrapper[4784]: E1205 14:00:42.867564 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9dc746ad-99ec-4a42-8c05-3c45ece46906" containerName="tempest-tests-tempest-tests-runner" Dec 05 14:00:42 crc kubenswrapper[4784]: I1205 14:00:42.867600 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="9dc746ad-99ec-4a42-8c05-3c45ece46906" containerName="tempest-tests-tempest-tests-runner" Dec 05 14:00:42 crc kubenswrapper[4784]: E1205 14:00:42.867628 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de" containerName="collect-profiles" Dec 05 14:00:42 crc kubenswrapper[4784]: I1205 14:00:42.867639 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de" containerName="collect-profiles" Dec 05 14:00:42 crc kubenswrapper[4784]: I1205 14:00:42.867949 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9c7f7cd-64da-4a4a-b2e6-b6d7016ec3de" containerName="collect-profiles" Dec 05 14:00:42 crc kubenswrapper[4784]: I1205 14:00:42.867992 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="9dc746ad-99ec-4a42-8c05-3c45ece46906" containerName="tempest-tests-tempest-tests-runner" Dec 05 14:00:42 crc kubenswrapper[4784]: I1205 14:00:42.868846 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 14:00:42 crc kubenswrapper[4784]: I1205 14:00:42.871781 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-mldzm" Dec 05 14:00:42 crc kubenswrapper[4784]: I1205 14:00:42.883932 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 05 14:00:42 crc kubenswrapper[4784]: I1205 14:00:42.961775 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bs98s\" (UniqueName: \"kubernetes.io/projected/a313521b-ac34-4e94-83a4-401c7e1acbbe-kube-api-access-bs98s\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"a313521b-ac34-4e94-83a4-401c7e1acbbe\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 14:00:42 crc kubenswrapper[4784]: I1205 14:00:42.962118 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"a313521b-ac34-4e94-83a4-401c7e1acbbe\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 14:00:43 crc kubenswrapper[4784]: I1205 14:00:43.063846 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"a313521b-ac34-4e94-83a4-401c7e1acbbe\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 14:00:43 crc kubenswrapper[4784]: I1205 14:00:43.064001 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bs98s\" (UniqueName: \"kubernetes.io/projected/a313521b-ac34-4e94-83a4-401c7e1acbbe-kube-api-access-bs98s\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"a313521b-ac34-4e94-83a4-401c7e1acbbe\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 14:00:43 crc kubenswrapper[4784]: I1205 14:00:43.064338 4784 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"a313521b-ac34-4e94-83a4-401c7e1acbbe\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 14:00:43 crc kubenswrapper[4784]: I1205 14:00:43.085116 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bs98s\" (UniqueName: \"kubernetes.io/projected/a313521b-ac34-4e94-83a4-401c7e1acbbe-kube-api-access-bs98s\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"a313521b-ac34-4e94-83a4-401c7e1acbbe\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 14:00:43 crc kubenswrapper[4784]: I1205 14:00:43.089324 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"a313521b-ac34-4e94-83a4-401c7e1acbbe\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 14:00:43 crc 
kubenswrapper[4784]: I1205 14:00:43.190572 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 14:00:43 crc kubenswrapper[4784]: I1205 14:00:43.694181 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 05 14:00:43 crc kubenswrapper[4784]: I1205 14:00:43.704292 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"a313521b-ac34-4e94-83a4-401c7e1acbbe","Type":"ContainerStarted","Data":"098742c3935426553636c4cb113ea54001c94f140027a7df581e99283a13e3d8"} Dec 05 14:00:45 crc kubenswrapper[4784]: I1205 14:00:45.721982 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"a313521b-ac34-4e94-83a4-401c7e1acbbe","Type":"ContainerStarted","Data":"5e3e100209e450800acd83abf73238f164d89e78433ea4aff6171edd208b79b0"} Dec 05 14:00:45 crc kubenswrapper[4784]: I1205 14:00:45.741362 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=2.160889543 podStartE2EDuration="3.741346469s" podCreationTimestamp="2025-12-05 14:00:42 +0000 UTC" firstStartedPulling="2025-12-05 14:00:43.690016552 +0000 UTC m=+5723.110083367" lastFinishedPulling="2025-12-05 14:00:45.270473478 +0000 UTC m=+5724.690540293" observedRunningTime="2025-12-05 14:00:45.733153493 +0000 UTC m=+5725.153220318" watchObservedRunningTime="2025-12-05 14:00:45.741346469 +0000 UTC m=+5725.161413284" Dec 05 14:00:52 crc kubenswrapper[4784]: I1205 14:00:52.999236 4784 scope.go:117] "RemoveContainer" containerID="142a049a6beda69ace9ae45109b3c32442d01e6b899428e3aae02b4842203f44" Dec 05 14:00:53 crc kubenswrapper[4784]: E1205 14:00:52.999994 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 14:01:00 crc kubenswrapper[4784]: I1205 14:01:00.151309 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29415721-9jftk"] Dec 05 14:01:00 crc kubenswrapper[4784]: I1205 14:01:00.153286 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29415721-9jftk" Dec 05 14:01:00 crc kubenswrapper[4784]: I1205 14:01:00.162936 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29415721-9jftk"] Dec 05 14:01:00 crc kubenswrapper[4784]: I1205 14:01:00.231053 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22fe4207-5d79-42c8-b6fc-b0a0539d17bf-combined-ca-bundle\") pod \"keystone-cron-29415721-9jftk\" (UID: \"22fe4207-5d79-42c8-b6fc-b0a0539d17bf\") " pod="openstack/keystone-cron-29415721-9jftk" Dec 05 14:01:00 crc kubenswrapper[4784]: I1205 14:01:00.231237 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6xgp2\" (UniqueName: \"kubernetes.io/projected/22fe4207-5d79-42c8-b6fc-b0a0539d17bf-kube-api-access-6xgp2\") pod \"keystone-cron-29415721-9jftk\" (UID: \"22fe4207-5d79-42c8-b6fc-b0a0539d17bf\") " pod="openstack/keystone-cron-29415721-9jftk" Dec 05 14:01:00 crc kubenswrapper[4784]: I1205 14:01:00.231287 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22fe4207-5d79-42c8-b6fc-b0a0539d17bf-config-data\") pod \"keystone-cron-29415721-9jftk\" (UID: \"22fe4207-5d79-42c8-b6fc-b0a0539d17bf\") " pod="openstack/keystone-cron-29415721-9jftk" Dec 05 14:01:00 crc kubenswrapper[4784]: I1205 14:01:00.231331 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/22fe4207-5d79-42c8-b6fc-b0a0539d17bf-fernet-keys\") pod \"keystone-cron-29415721-9jftk\" (UID: \"22fe4207-5d79-42c8-b6fc-b0a0539d17bf\") " pod="openstack/keystone-cron-29415721-9jftk" Dec 05 14:01:00 crc kubenswrapper[4784]: I1205 14:01:00.333256 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6xgp2\" (UniqueName: \"kubernetes.io/projected/22fe4207-5d79-42c8-b6fc-b0a0539d17bf-kube-api-access-6xgp2\") pod \"keystone-cron-29415721-9jftk\" (UID: \"22fe4207-5d79-42c8-b6fc-b0a0539d17bf\") " pod="openstack/keystone-cron-29415721-9jftk" Dec 05 14:01:00 crc kubenswrapper[4784]: I1205 14:01:00.333348 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22fe4207-5d79-42c8-b6fc-b0a0539d17bf-config-data\") pod \"keystone-cron-29415721-9jftk\" (UID: \"22fe4207-5d79-42c8-b6fc-b0a0539d17bf\") " pod="openstack/keystone-cron-29415721-9jftk" Dec 05 14:01:00 crc kubenswrapper[4784]: I1205 14:01:00.333401 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/22fe4207-5d79-42c8-b6fc-b0a0539d17bf-fernet-keys\") pod \"keystone-cron-29415721-9jftk\" (UID: \"22fe4207-5d79-42c8-b6fc-b0a0539d17bf\") " pod="openstack/keystone-cron-29415721-9jftk" Dec 05 14:01:00 crc kubenswrapper[4784]: I1205 14:01:00.333470 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22fe4207-5d79-42c8-b6fc-b0a0539d17bf-combined-ca-bundle\") pod \"keystone-cron-29415721-9jftk\" (UID: \"22fe4207-5d79-42c8-b6fc-b0a0539d17bf\") " pod="openstack/keystone-cron-29415721-9jftk" Dec 05 14:01:00 crc kubenswrapper[4784]: I1205 14:01:00.342063 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22fe4207-5d79-42c8-b6fc-b0a0539d17bf-combined-ca-bundle\") pod \"keystone-cron-29415721-9jftk\" (UID: \"22fe4207-5d79-42c8-b6fc-b0a0539d17bf\") " pod="openstack/keystone-cron-29415721-9jftk" Dec 05 14:01:00 crc kubenswrapper[4784]: I1205 14:01:00.344361 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/22fe4207-5d79-42c8-b6fc-b0a0539d17bf-fernet-keys\") pod \"keystone-cron-29415721-9jftk\" (UID: \"22fe4207-5d79-42c8-b6fc-b0a0539d17bf\") " pod="openstack/keystone-cron-29415721-9jftk" Dec 05 14:01:00 crc kubenswrapper[4784]: I1205 14:01:00.346237 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22fe4207-5d79-42c8-b6fc-b0a0539d17bf-config-data\") pod \"keystone-cron-29415721-9jftk\" (UID: \"22fe4207-5d79-42c8-b6fc-b0a0539d17bf\") " pod="openstack/keystone-cron-29415721-9jftk" Dec 05 14:01:00 crc kubenswrapper[4784]: I1205 14:01:00.361382 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6xgp2\" (UniqueName: \"kubernetes.io/projected/22fe4207-5d79-42c8-b6fc-b0a0539d17bf-kube-api-access-6xgp2\") pod \"keystone-cron-29415721-9jftk\" (UID: \"22fe4207-5d79-42c8-b6fc-b0a0539d17bf\") " pod="openstack/keystone-cron-29415721-9jftk" Dec 05 14:01:00 crc kubenswrapper[4784]: I1205 14:01:00.488999 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29415721-9jftk" Dec 05 14:01:00 crc kubenswrapper[4784]: W1205 14:01:00.953635 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod22fe4207_5d79_42c8_b6fc_b0a0539d17bf.slice/crio-7359f03078789977c13017d366883fe1067b368eb5a155edd44b893b44688735 WatchSource:0}: Error finding container 7359f03078789977c13017d366883fe1067b368eb5a155edd44b893b44688735: Status 404 returned error can't find the container with id 7359f03078789977c13017d366883fe1067b368eb5a155edd44b893b44688735 Dec 05 14:01:00 crc kubenswrapper[4784]: I1205 14:01:00.955297 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29415721-9jftk"] Dec 05 14:01:01 crc kubenswrapper[4784]: I1205 14:01:01.882133 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415721-9jftk" event={"ID":"22fe4207-5d79-42c8-b6fc-b0a0539d17bf","Type":"ContainerStarted","Data":"aa38fc521878f81a51876acf31cc52f56eda7f7ad8ffe85463fd80f748a11974"} Dec 05 14:01:01 crc kubenswrapper[4784]: I1205 14:01:01.882535 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415721-9jftk" event={"ID":"22fe4207-5d79-42c8-b6fc-b0a0539d17bf","Type":"ContainerStarted","Data":"7359f03078789977c13017d366883fe1067b368eb5a155edd44b893b44688735"} Dec 05 14:01:01 crc kubenswrapper[4784]: I1205 14:01:01.914942 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29415721-9jftk" podStartSLOduration=1.9149089639999999 podStartE2EDuration="1.914908964s" podCreationTimestamp="2025-12-05 14:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 14:01:01.903396436 +0000 UTC m=+5741.323463511" watchObservedRunningTime="2025-12-05 14:01:01.914908964 +0000 UTC m=+5741.334975809" Dec 05 14:01:04 crc kubenswrapper[4784]: I1205 14:01:03.999756 
4784 scope.go:117] "RemoveContainer" containerID="142a049a6beda69ace9ae45109b3c32442d01e6b899428e3aae02b4842203f44" Dec 05 14:01:04 crc kubenswrapper[4784]: E1205 14:01:04.000696 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 14:01:04 crc kubenswrapper[4784]: I1205 14:01:04.912093 4784 generic.go:334] "Generic (PLEG): container finished" podID="22fe4207-5d79-42c8-b6fc-b0a0539d17bf" containerID="aa38fc521878f81a51876acf31cc52f56eda7f7ad8ffe85463fd80f748a11974" exitCode=0 Dec 05 14:01:04 crc kubenswrapper[4784]: I1205 14:01:04.912181 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415721-9jftk" event={"ID":"22fe4207-5d79-42c8-b6fc-b0a0539d17bf","Type":"ContainerDied","Data":"aa38fc521878f81a51876acf31cc52f56eda7f7ad8ffe85463fd80f748a11974"} Dec 05 14:01:06 crc kubenswrapper[4784]: I1205 14:01:06.319515 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29415721-9jftk" Dec 05 14:01:06 crc kubenswrapper[4784]: I1205 14:01:06.368756 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/22fe4207-5d79-42c8-b6fc-b0a0539d17bf-fernet-keys\") pod \"22fe4207-5d79-42c8-b6fc-b0a0539d17bf\" (UID: \"22fe4207-5d79-42c8-b6fc-b0a0539d17bf\") " Dec 05 14:01:06 crc kubenswrapper[4784]: I1205 14:01:06.368878 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6xgp2\" (UniqueName: \"kubernetes.io/projected/22fe4207-5d79-42c8-b6fc-b0a0539d17bf-kube-api-access-6xgp2\") pod \"22fe4207-5d79-42c8-b6fc-b0a0539d17bf\" (UID: \"22fe4207-5d79-42c8-b6fc-b0a0539d17bf\") " Dec 05 14:01:06 crc kubenswrapper[4784]: I1205 14:01:06.369019 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22fe4207-5d79-42c8-b6fc-b0a0539d17bf-combined-ca-bundle\") pod \"22fe4207-5d79-42c8-b6fc-b0a0539d17bf\" (UID: \"22fe4207-5d79-42c8-b6fc-b0a0539d17bf\") " Dec 05 14:01:06 crc kubenswrapper[4784]: I1205 14:01:06.369159 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22fe4207-5d79-42c8-b6fc-b0a0539d17bf-config-data\") pod \"22fe4207-5d79-42c8-b6fc-b0a0539d17bf\" (UID: \"22fe4207-5d79-42c8-b6fc-b0a0539d17bf\") " Dec 05 14:01:06 crc kubenswrapper[4784]: I1205 14:01:06.375272 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22fe4207-5d79-42c8-b6fc-b0a0539d17bf-kube-api-access-6xgp2" (OuterVolumeSpecName: "kube-api-access-6xgp2") pod "22fe4207-5d79-42c8-b6fc-b0a0539d17bf" (UID: "22fe4207-5d79-42c8-b6fc-b0a0539d17bf"). InnerVolumeSpecName "kube-api-access-6xgp2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:01:06 crc kubenswrapper[4784]: I1205 14:01:06.377130 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22fe4207-5d79-42c8-b6fc-b0a0539d17bf-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "22fe4207-5d79-42c8-b6fc-b0a0539d17bf" (UID: "22fe4207-5d79-42c8-b6fc-b0a0539d17bf"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:01:06 crc kubenswrapper[4784]: I1205 14:01:06.399522 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22fe4207-5d79-42c8-b6fc-b0a0539d17bf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "22fe4207-5d79-42c8-b6fc-b0a0539d17bf" (UID: "22fe4207-5d79-42c8-b6fc-b0a0539d17bf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:01:06 crc kubenswrapper[4784]: I1205 14:01:06.420035 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22fe4207-5d79-42c8-b6fc-b0a0539d17bf-config-data" (OuterVolumeSpecName: "config-data") pod "22fe4207-5d79-42c8-b6fc-b0a0539d17bf" (UID: "22fe4207-5d79-42c8-b6fc-b0a0539d17bf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:01:06 crc kubenswrapper[4784]: I1205 14:01:06.471558 4784 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22fe4207-5d79-42c8-b6fc-b0a0539d17bf-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 14:01:06 crc kubenswrapper[4784]: I1205 14:01:06.471590 4784 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/22fe4207-5d79-42c8-b6fc-b0a0539d17bf-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 05 14:01:06 crc kubenswrapper[4784]: I1205 14:01:06.471602 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6xgp2\" (UniqueName: \"kubernetes.io/projected/22fe4207-5d79-42c8-b6fc-b0a0539d17bf-kube-api-access-6xgp2\") on node \"crc\" DevicePath \"\"" Dec 05 14:01:06 crc kubenswrapper[4784]: I1205 14:01:06.471612 4784 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22fe4207-5d79-42c8-b6fc-b0a0539d17bf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 14:01:06 crc kubenswrapper[4784]: I1205 14:01:06.932056 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415721-9jftk" event={"ID":"22fe4207-5d79-42c8-b6fc-b0a0539d17bf","Type":"ContainerDied","Data":"7359f03078789977c13017d366883fe1067b368eb5a155edd44b893b44688735"} Dec 05 14:01:06 crc kubenswrapper[4784]: I1205 14:01:06.932407 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7359f03078789977c13017d366883fe1067b368eb5a155edd44b893b44688735" Dec 05 14:01:06 crc kubenswrapper[4784]: I1205 14:01:06.932366 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29415721-9jftk" Dec 05 14:01:14 crc kubenswrapper[4784]: I1205 14:01:14.427564 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-xqjjn/must-gather-gsxrd"] Dec 05 14:01:14 crc kubenswrapper[4784]: E1205 14:01:14.428501 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22fe4207-5d79-42c8-b6fc-b0a0539d17bf" containerName="keystone-cron" Dec 05 14:01:14 crc kubenswrapper[4784]: I1205 14:01:14.428515 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="22fe4207-5d79-42c8-b6fc-b0a0539d17bf" containerName="keystone-cron" Dec 05 14:01:14 crc kubenswrapper[4784]: I1205 14:01:14.428722 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="22fe4207-5d79-42c8-b6fc-b0a0539d17bf" containerName="keystone-cron" Dec 05 14:01:14 crc kubenswrapper[4784]: I1205 14:01:14.429874 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xqjjn/must-gather-gsxrd" Dec 05 14:01:14 crc kubenswrapper[4784]: I1205 14:01:14.432901 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-xqjjn"/"kube-root-ca.crt" Dec 05 14:01:14 crc kubenswrapper[4784]: I1205 14:01:14.433231 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-xqjjn"/"openshift-service-ca.crt" Dec 05 14:01:14 crc kubenswrapper[4784]: I1205 14:01:14.437867 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/7ea9afcc-76ac-4636-96ef-8b754926648c-must-gather-output\") pod \"must-gather-gsxrd\" (UID: \"7ea9afcc-76ac-4636-96ef-8b754926648c\") " pod="openshift-must-gather-xqjjn/must-gather-gsxrd" Dec 05 14:01:14 crc kubenswrapper[4784]: I1205 14:01:14.437909 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fgp4k\" (UniqueName: \"kubernetes.io/projected/7ea9afcc-76ac-4636-96ef-8b754926648c-kube-api-access-fgp4k\") pod \"must-gather-gsxrd\" (UID: \"7ea9afcc-76ac-4636-96ef-8b754926648c\") " pod="openshift-must-gather-xqjjn/must-gather-gsxrd" Dec 05 14:01:14 crc kubenswrapper[4784]: I1205 14:01:14.445256 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-xqjjn/must-gather-gsxrd"] Dec 05 14:01:14 crc kubenswrapper[4784]: I1205 14:01:14.539344 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/7ea9afcc-76ac-4636-96ef-8b754926648c-must-gather-output\") pod \"must-gather-gsxrd\" (UID: \"7ea9afcc-76ac-4636-96ef-8b754926648c\") " pod="openshift-must-gather-xqjjn/must-gather-gsxrd" Dec 05 14:01:14 crc kubenswrapper[4784]: I1205 14:01:14.539482 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fgp4k\" (UniqueName: \"kubernetes.io/projected/7ea9afcc-76ac-4636-96ef-8b754926648c-kube-api-access-fgp4k\") pod \"must-gather-gsxrd\" (UID: \"7ea9afcc-76ac-4636-96ef-8b754926648c\") " pod="openshift-must-gather-xqjjn/must-gather-gsxrd" Dec 05 14:01:14 crc kubenswrapper[4784]: I1205 14:01:14.540065 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/7ea9afcc-76ac-4636-96ef-8b754926648c-must-gather-output\") pod \"must-gather-gsxrd\" (UID: \"7ea9afcc-76ac-4636-96ef-8b754926648c\") " 
pod="openshift-must-gather-xqjjn/must-gather-gsxrd" Dec 05 14:01:14 crc kubenswrapper[4784]: I1205 14:01:14.565493 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fgp4k\" (UniqueName: \"kubernetes.io/projected/7ea9afcc-76ac-4636-96ef-8b754926648c-kube-api-access-fgp4k\") pod \"must-gather-gsxrd\" (UID: \"7ea9afcc-76ac-4636-96ef-8b754926648c\") " pod="openshift-must-gather-xqjjn/must-gather-gsxrd" Dec 05 14:01:14 crc kubenswrapper[4784]: I1205 14:01:14.748448 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xqjjn/must-gather-gsxrd" Dec 05 14:01:15 crc kubenswrapper[4784]: I1205 14:01:15.332893 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-xqjjn/must-gather-gsxrd"] Dec 05 14:01:16 crc kubenswrapper[4784]: I1205 14:01:16.025072 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xqjjn/must-gather-gsxrd" event={"ID":"7ea9afcc-76ac-4636-96ef-8b754926648c","Type":"ContainerStarted","Data":"ec5d36a883bba278fdd7e5654b84f5324ee527d6a85dfce9dcde893f990d668a"} Dec 05 14:01:17 crc kubenswrapper[4784]: I1205 14:01:17.999244 4784 scope.go:117] "RemoveContainer" containerID="142a049a6beda69ace9ae45109b3c32442d01e6b899428e3aae02b4842203f44" Dec 05 14:01:18 crc kubenswrapper[4784]: E1205 14:01:18.000337 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 14:01:24 crc kubenswrapper[4784]: I1205 14:01:24.147518 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xqjjn/must-gather-gsxrd" event={"ID":"7ea9afcc-76ac-4636-96ef-8b754926648c","Type":"ContainerStarted","Data":"be9821a3e48abfd40e5cce7b5cdd86807c307245913e9ac14c4686e741c7e985"} Dec 05 14:01:24 crc kubenswrapper[4784]: I1205 14:01:24.148134 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xqjjn/must-gather-gsxrd" event={"ID":"7ea9afcc-76ac-4636-96ef-8b754926648c","Type":"ContainerStarted","Data":"d38c702afee5dad8af69932091d95d65cd4e65d292f5c89c40313da76cf96be1"} Dec 05 14:01:24 crc kubenswrapper[4784]: I1205 14:01:24.171242 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-xqjjn/must-gather-gsxrd" podStartSLOduration=1.8815412139999999 podStartE2EDuration="10.171221277s" podCreationTimestamp="2025-12-05 14:01:14 +0000 UTC" firstStartedPulling="2025-12-05 14:01:15.340918118 +0000 UTC m=+5754.760984943" lastFinishedPulling="2025-12-05 14:01:23.630598191 +0000 UTC m=+5763.050665006" observedRunningTime="2025-12-05 14:01:24.163156845 +0000 UTC m=+5763.583223670" watchObservedRunningTime="2025-12-05 14:01:24.171221277 +0000 UTC m=+5763.591288102" Dec 05 14:01:27 crc kubenswrapper[4784]: I1205 14:01:27.674278 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-xqjjn/crc-debug-grqc4"] Dec 05 14:01:27 crc kubenswrapper[4784]: I1205 14:01:27.676134 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xqjjn/crc-debug-grqc4" Dec 05 14:01:27 crc kubenswrapper[4784]: I1205 14:01:27.678160 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-xqjjn"/"default-dockercfg-qrc2j" Dec 05 14:01:27 crc kubenswrapper[4784]: I1205 14:01:27.737950 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ncjwq\" (UniqueName: \"kubernetes.io/projected/05b00355-b1a0-4dc3-8ca6-c20832d21dce-kube-api-access-ncjwq\") pod \"crc-debug-grqc4\" (UID: \"05b00355-b1a0-4dc3-8ca6-c20832d21dce\") " pod="openshift-must-gather-xqjjn/crc-debug-grqc4" Dec 05 14:01:27 crc kubenswrapper[4784]: I1205 14:01:27.738156 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/05b00355-b1a0-4dc3-8ca6-c20832d21dce-host\") pod \"crc-debug-grqc4\" (UID: \"05b00355-b1a0-4dc3-8ca6-c20832d21dce\") " pod="openshift-must-gather-xqjjn/crc-debug-grqc4" Dec 05 14:01:27 crc kubenswrapper[4784]: I1205 14:01:27.840949 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ncjwq\" (UniqueName: \"kubernetes.io/projected/05b00355-b1a0-4dc3-8ca6-c20832d21dce-kube-api-access-ncjwq\") pod \"crc-debug-grqc4\" (UID: \"05b00355-b1a0-4dc3-8ca6-c20832d21dce\") " pod="openshift-must-gather-xqjjn/crc-debug-grqc4" Dec 05 14:01:27 crc kubenswrapper[4784]: I1205 14:01:27.841169 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/05b00355-b1a0-4dc3-8ca6-c20832d21dce-host\") pod \"crc-debug-grqc4\" (UID: \"05b00355-b1a0-4dc3-8ca6-c20832d21dce\") " pod="openshift-must-gather-xqjjn/crc-debug-grqc4" Dec 05 14:01:27 crc kubenswrapper[4784]: I1205 14:01:27.841366 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/05b00355-b1a0-4dc3-8ca6-c20832d21dce-host\") pod \"crc-debug-grqc4\" (UID: \"05b00355-b1a0-4dc3-8ca6-c20832d21dce\") " pod="openshift-must-gather-xqjjn/crc-debug-grqc4" Dec 05 14:01:27 crc kubenswrapper[4784]: I1205 14:01:27.867349 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ncjwq\" (UniqueName: \"kubernetes.io/projected/05b00355-b1a0-4dc3-8ca6-c20832d21dce-kube-api-access-ncjwq\") pod \"crc-debug-grqc4\" (UID: \"05b00355-b1a0-4dc3-8ca6-c20832d21dce\") " pod="openshift-must-gather-xqjjn/crc-debug-grqc4" Dec 05 14:01:27 crc kubenswrapper[4784]: I1205 14:01:27.993732 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xqjjn/crc-debug-grqc4" Dec 05 14:01:28 crc kubenswrapper[4784]: W1205 14:01:28.026374 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod05b00355_b1a0_4dc3_8ca6_c20832d21dce.slice/crio-1c9515be336a44e09c65786b47db74e3626cd31ea064a08958a2d223bb32d697 WatchSource:0}: Error finding container 1c9515be336a44e09c65786b47db74e3626cd31ea064a08958a2d223bb32d697: Status 404 returned error can't find the container with id 1c9515be336a44e09c65786b47db74e3626cd31ea064a08958a2d223bb32d697 Dec 05 14:01:28 crc kubenswrapper[4784]: I1205 14:01:28.207931 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xqjjn/crc-debug-grqc4" event={"ID":"05b00355-b1a0-4dc3-8ca6-c20832d21dce","Type":"ContainerStarted","Data":"1c9515be336a44e09c65786b47db74e3626cd31ea064a08958a2d223bb32d697"} Dec 05 14:01:32 crc kubenswrapper[4784]: I1205 14:01:32.999275 4784 scope.go:117] "RemoveContainer" containerID="142a049a6beda69ace9ae45109b3c32442d01e6b899428e3aae02b4842203f44" Dec 05 14:01:33 crc kubenswrapper[4784]: E1205 14:01:33.000021 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 14:01:41 crc kubenswrapper[4784]: I1205 14:01:41.344121 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xqjjn/crc-debug-grqc4" event={"ID":"05b00355-b1a0-4dc3-8ca6-c20832d21dce","Type":"ContainerStarted","Data":"32b30fbc20d149ed33b1a825784ec1804fdbac85f0989fa3df44a4e58025a57c"} Dec 05 14:01:41 crc kubenswrapper[4784]: I1205 14:01:41.366672 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-xqjjn/crc-debug-grqc4" podStartSLOduration=1.5825017190000001 podStartE2EDuration="14.366653922s" podCreationTimestamp="2025-12-05 14:01:27 +0000 UTC" firstStartedPulling="2025-12-05 14:01:28.029607882 +0000 UTC m=+5767.449674697" lastFinishedPulling="2025-12-05 14:01:40.813760075 +0000 UTC m=+5780.233826900" observedRunningTime="2025-12-05 14:01:41.360553073 +0000 UTC m=+5780.780619888" watchObservedRunningTime="2025-12-05 14:01:41.366653922 +0000 UTC m=+5780.786720737" Dec 05 14:01:45 crc kubenswrapper[4784]: I1205 14:01:45.998494 4784 scope.go:117] "RemoveContainer" containerID="142a049a6beda69ace9ae45109b3c32442d01e6b899428e3aae02b4842203f44" Dec 05 14:01:46 crc kubenswrapper[4784]: E1205 14:01:45.999445 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 14:01:58 crc kubenswrapper[4784]: I1205 14:01:58.999054 4784 scope.go:117] "RemoveContainer" containerID="142a049a6beda69ace9ae45109b3c32442d01e6b899428e3aae02b4842203f44" Dec 05 14:01:59 crc kubenswrapper[4784]: E1205 14:01:58.999853 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 14:02:11 crc kubenswrapper[4784]: I1205 14:02:11.999448 4784 scope.go:117] "RemoveContainer" containerID="142a049a6beda69ace9ae45109b3c32442d01e6b899428e3aae02b4842203f44" Dec 05 14:02:12 crc kubenswrapper[4784]: E1205 14:02:12.000177 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 14:02:25 crc kubenswrapper[4784]: I1205 14:02:25.999199 4784 scope.go:117] "RemoveContainer" containerID="142a049a6beda69ace9ae45109b3c32442d01e6b899428e3aae02b4842203f44" Dec 05 14:02:26 crc kubenswrapper[4784]: E1205 14:02:25.999926 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 14:02:29 crc kubenswrapper[4784]: I1205 14:02:29.319324 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-rw6b4"] Dec 05 14:02:29 crc kubenswrapper[4784]: I1205 14:02:29.325740 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rw6b4" Dec 05 14:02:29 crc kubenswrapper[4784]: I1205 14:02:29.340299 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rw6b4"] Dec 05 14:02:29 crc kubenswrapper[4784]: I1205 14:02:29.474763 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cae07b75-3cce-4e2a-8632-c4af29d56bab-utilities\") pod \"certified-operators-rw6b4\" (UID: \"cae07b75-3cce-4e2a-8632-c4af29d56bab\") " pod="openshift-marketplace/certified-operators-rw6b4" Dec 05 14:02:29 crc kubenswrapper[4784]: I1205 14:02:29.474868 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cae07b75-3cce-4e2a-8632-c4af29d56bab-catalog-content\") pod \"certified-operators-rw6b4\" (UID: \"cae07b75-3cce-4e2a-8632-c4af29d56bab\") " pod="openshift-marketplace/certified-operators-rw6b4" Dec 05 14:02:29 crc kubenswrapper[4784]: I1205 14:02:29.474903 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6wbp\" (UniqueName: \"kubernetes.io/projected/cae07b75-3cce-4e2a-8632-c4af29d56bab-kube-api-access-x6wbp\") pod \"certified-operators-rw6b4\" (UID: \"cae07b75-3cce-4e2a-8632-c4af29d56bab\") " pod="openshift-marketplace/certified-operators-rw6b4" Dec 05 14:02:29 crc kubenswrapper[4784]: I1205 14:02:29.577129 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cae07b75-3cce-4e2a-8632-c4af29d56bab-utilities\") pod \"certified-operators-rw6b4\" (UID: \"cae07b75-3cce-4e2a-8632-c4af29d56bab\") " pod="openshift-marketplace/certified-operators-rw6b4" Dec 05 14:02:29 crc kubenswrapper[4784]: I1205 14:02:29.577287 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cae07b75-3cce-4e2a-8632-c4af29d56bab-catalog-content\") pod \"certified-operators-rw6b4\" (UID: \"cae07b75-3cce-4e2a-8632-c4af29d56bab\") " pod="openshift-marketplace/certified-operators-rw6b4" Dec 05 14:02:29 crc kubenswrapper[4784]: I1205 14:02:29.577333 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6wbp\" (UniqueName: \"kubernetes.io/projected/cae07b75-3cce-4e2a-8632-c4af29d56bab-kube-api-access-x6wbp\") pod \"certified-operators-rw6b4\" (UID: \"cae07b75-3cce-4e2a-8632-c4af29d56bab\") " pod="openshift-marketplace/certified-operators-rw6b4" Dec 05 14:02:29 crc kubenswrapper[4784]: I1205 14:02:29.577672 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cae07b75-3cce-4e2a-8632-c4af29d56bab-utilities\") pod \"certified-operators-rw6b4\" (UID: \"cae07b75-3cce-4e2a-8632-c4af29d56bab\") " pod="openshift-marketplace/certified-operators-rw6b4" Dec 05 14:02:29 crc kubenswrapper[4784]: I1205 14:02:29.577992 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cae07b75-3cce-4e2a-8632-c4af29d56bab-catalog-content\") pod \"certified-operators-rw6b4\" (UID: \"cae07b75-3cce-4e2a-8632-c4af29d56bab\") " pod="openshift-marketplace/certified-operators-rw6b4" Dec 05 14:02:29 crc kubenswrapper[4784]: I1205 14:02:29.603481 4784 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-x6wbp\" (UniqueName: \"kubernetes.io/projected/cae07b75-3cce-4e2a-8632-c4af29d56bab-kube-api-access-x6wbp\") pod \"certified-operators-rw6b4\" (UID: \"cae07b75-3cce-4e2a-8632-c4af29d56bab\") " pod="openshift-marketplace/certified-operators-rw6b4" Dec 05 14:02:29 crc kubenswrapper[4784]: I1205 14:02:29.655895 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rw6b4" Dec 05 14:02:30 crc kubenswrapper[4784]: I1205 14:02:30.280890 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rw6b4"] Dec 05 14:02:30 crc kubenswrapper[4784]: I1205 14:02:30.789088 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rw6b4" event={"ID":"cae07b75-3cce-4e2a-8632-c4af29d56bab","Type":"ContainerStarted","Data":"7c147023eef00f47a9462f96ac5857ae6317cbb79961f240f874e7644f2888e8"} Dec 05 14:02:31 crc kubenswrapper[4784]: I1205 14:02:31.799920 4784 generic.go:334] "Generic (PLEG): container finished" podID="cae07b75-3cce-4e2a-8632-c4af29d56bab" containerID="4eee3ba5f4a24c5c934bafdfbac8f76a68cfd9a355c9854c749481b2029f0f3f" exitCode=0 Dec 05 14:02:31 crc kubenswrapper[4784]: I1205 14:02:31.799982 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rw6b4" event={"ID":"cae07b75-3cce-4e2a-8632-c4af29d56bab","Type":"ContainerDied","Data":"4eee3ba5f4a24c5c934bafdfbac8f76a68cfd9a355c9854c749481b2029f0f3f"} Dec 05 14:02:32 crc kubenswrapper[4784]: I1205 14:02:32.816082 4784 generic.go:334] "Generic (PLEG): container finished" podID="05b00355-b1a0-4dc3-8ca6-c20832d21dce" containerID="32b30fbc20d149ed33b1a825784ec1804fdbac85f0989fa3df44a4e58025a57c" exitCode=0 Dec 05 14:02:32 crc kubenswrapper[4784]: I1205 14:02:32.816162 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xqjjn/crc-debug-grqc4" event={"ID":"05b00355-b1a0-4dc3-8ca6-c20832d21dce","Type":"ContainerDied","Data":"32b30fbc20d149ed33b1a825784ec1804fdbac85f0989fa3df44a4e58025a57c"} Dec 05 14:02:33 crc kubenswrapper[4784]: I1205 14:02:33.829982 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rw6b4" event={"ID":"cae07b75-3cce-4e2a-8632-c4af29d56bab","Type":"ContainerStarted","Data":"e8cb46313105c7849f491cd369426ac733fdd04147f61d01914070500d055975"} Dec 05 14:02:33 crc kubenswrapper[4784]: I1205 14:02:33.955605 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xqjjn/crc-debug-grqc4" Dec 05 14:02:33 crc kubenswrapper[4784]: I1205 14:02:33.992119 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-xqjjn/crc-debug-grqc4"] Dec 05 14:02:34 crc kubenswrapper[4784]: I1205 14:02:34.002883 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-xqjjn/crc-debug-grqc4"] Dec 05 14:02:34 crc kubenswrapper[4784]: I1205 14:02:34.104069 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ncjwq\" (UniqueName: \"kubernetes.io/projected/05b00355-b1a0-4dc3-8ca6-c20832d21dce-kube-api-access-ncjwq\") pod \"05b00355-b1a0-4dc3-8ca6-c20832d21dce\" (UID: \"05b00355-b1a0-4dc3-8ca6-c20832d21dce\") " Dec 05 14:02:34 crc kubenswrapper[4784]: I1205 14:02:34.104120 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/05b00355-b1a0-4dc3-8ca6-c20832d21dce-host\") pod \"05b00355-b1a0-4dc3-8ca6-c20832d21dce\" (UID: \"05b00355-b1a0-4dc3-8ca6-c20832d21dce\") " Dec 05 14:02:34 crc kubenswrapper[4784]: I1205 14:02:34.104308 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/05b00355-b1a0-4dc3-8ca6-c20832d21dce-host" (OuterVolumeSpecName: "host") pod "05b00355-b1a0-4dc3-8ca6-c20832d21dce" (UID: "05b00355-b1a0-4dc3-8ca6-c20832d21dce"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 14:02:34 crc kubenswrapper[4784]: I1205 14:02:34.104675 4784 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/05b00355-b1a0-4dc3-8ca6-c20832d21dce-host\") on node \"crc\" DevicePath \"\"" Dec 05 14:02:34 crc kubenswrapper[4784]: I1205 14:02:34.842643 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1c9515be336a44e09c65786b47db74e3626cd31ea064a08958a2d223bb32d697" Dec 05 14:02:34 crc kubenswrapper[4784]: I1205 14:02:34.842832 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xqjjn/crc-debug-grqc4" Dec 05 14:02:35 crc kubenswrapper[4784]: I1205 14:02:35.086279 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05b00355-b1a0-4dc3-8ca6-c20832d21dce-kube-api-access-ncjwq" (OuterVolumeSpecName: "kube-api-access-ncjwq") pod "05b00355-b1a0-4dc3-8ca6-c20832d21dce" (UID: "05b00355-b1a0-4dc3-8ca6-c20832d21dce"). InnerVolumeSpecName "kube-api-access-ncjwq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:02:35 crc kubenswrapper[4784]: I1205 14:02:35.128363 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ncjwq\" (UniqueName: \"kubernetes.io/projected/05b00355-b1a0-4dc3-8ca6-c20832d21dce-kube-api-access-ncjwq\") on node \"crc\" DevicePath \"\"" Dec 05 14:02:35 crc kubenswrapper[4784]: I1205 14:02:35.227367 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-xqjjn/crc-debug-rhd75"] Dec 05 14:02:35 crc kubenswrapper[4784]: E1205 14:02:35.227795 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05b00355-b1a0-4dc3-8ca6-c20832d21dce" containerName="container-00" Dec 05 14:02:35 crc kubenswrapper[4784]: I1205 14:02:35.227811 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="05b00355-b1a0-4dc3-8ca6-c20832d21dce" containerName="container-00" Dec 05 14:02:35 crc kubenswrapper[4784]: I1205 14:02:35.228017 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="05b00355-b1a0-4dc3-8ca6-c20832d21dce" containerName="container-00" Dec 05 14:02:35 crc kubenswrapper[4784]: I1205 14:02:35.228757 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xqjjn/crc-debug-rhd75" Dec 05 14:02:35 crc kubenswrapper[4784]: I1205 14:02:35.231158 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-xqjjn"/"default-dockercfg-qrc2j" Dec 05 14:02:35 crc kubenswrapper[4784]: I1205 14:02:35.332349 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0ffcfef3-8ffd-426d-9fd0-543b3dc53d3e-host\") pod \"crc-debug-rhd75\" (UID: \"0ffcfef3-8ffd-426d-9fd0-543b3dc53d3e\") " pod="openshift-must-gather-xqjjn/crc-debug-rhd75" Dec 05 14:02:35 crc kubenswrapper[4784]: I1205 14:02:35.332427 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zdjw5\" (UniqueName: \"kubernetes.io/projected/0ffcfef3-8ffd-426d-9fd0-543b3dc53d3e-kube-api-access-zdjw5\") pod \"crc-debug-rhd75\" (UID: \"0ffcfef3-8ffd-426d-9fd0-543b3dc53d3e\") " pod="openshift-must-gather-xqjjn/crc-debug-rhd75" Dec 05 14:02:35 crc kubenswrapper[4784]: I1205 14:02:35.435369 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0ffcfef3-8ffd-426d-9fd0-543b3dc53d3e-host\") pod \"crc-debug-rhd75\" (UID: \"0ffcfef3-8ffd-426d-9fd0-543b3dc53d3e\") " pod="openshift-must-gather-xqjjn/crc-debug-rhd75" Dec 05 14:02:35 crc kubenswrapper[4784]: I1205 14:02:35.435451 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zdjw5\" (UniqueName: \"kubernetes.io/projected/0ffcfef3-8ffd-426d-9fd0-543b3dc53d3e-kube-api-access-zdjw5\") pod \"crc-debug-rhd75\" (UID: \"0ffcfef3-8ffd-426d-9fd0-543b3dc53d3e\") " pod="openshift-must-gather-xqjjn/crc-debug-rhd75" Dec 05 14:02:35 crc kubenswrapper[4784]: I1205 14:02:35.435568 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0ffcfef3-8ffd-426d-9fd0-543b3dc53d3e-host\") pod \"crc-debug-rhd75\" (UID: \"0ffcfef3-8ffd-426d-9fd0-543b3dc53d3e\") " pod="openshift-must-gather-xqjjn/crc-debug-rhd75" Dec 05 14:02:35 crc kubenswrapper[4784]: I1205 14:02:35.463207 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zdjw5\" (UniqueName: 
\"kubernetes.io/projected/0ffcfef3-8ffd-426d-9fd0-543b3dc53d3e-kube-api-access-zdjw5\") pod \"crc-debug-rhd75\" (UID: \"0ffcfef3-8ffd-426d-9fd0-543b3dc53d3e\") " pod="openshift-must-gather-xqjjn/crc-debug-rhd75" Dec 05 14:02:35 crc kubenswrapper[4784]: I1205 14:02:35.545990 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xqjjn/crc-debug-rhd75" Dec 05 14:02:35 crc kubenswrapper[4784]: W1205 14:02:35.573825 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0ffcfef3_8ffd_426d_9fd0_543b3dc53d3e.slice/crio-f29fa9ed89525bf858db531e7178349d1faedb01592fe8d48f1664ff7b983b7c WatchSource:0}: Error finding container f29fa9ed89525bf858db531e7178349d1faedb01592fe8d48f1664ff7b983b7c: Status 404 returned error can't find the container with id f29fa9ed89525bf858db531e7178349d1faedb01592fe8d48f1664ff7b983b7c Dec 05 14:02:35 crc kubenswrapper[4784]: I1205 14:02:35.854388 4784 generic.go:334] "Generic (PLEG): container finished" podID="cae07b75-3cce-4e2a-8632-c4af29d56bab" containerID="e8cb46313105c7849f491cd369426ac733fdd04147f61d01914070500d055975" exitCode=0 Dec 05 14:02:35 crc kubenswrapper[4784]: I1205 14:02:35.854486 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rw6b4" event={"ID":"cae07b75-3cce-4e2a-8632-c4af29d56bab","Type":"ContainerDied","Data":"e8cb46313105c7849f491cd369426ac733fdd04147f61d01914070500d055975"} Dec 05 14:02:35 crc kubenswrapper[4784]: I1205 14:02:35.856256 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xqjjn/crc-debug-rhd75" event={"ID":"0ffcfef3-8ffd-426d-9fd0-543b3dc53d3e","Type":"ContainerStarted","Data":"f29fa9ed89525bf858db531e7178349d1faedb01592fe8d48f1664ff7b983b7c"} Dec 05 14:02:36 crc kubenswrapper[4784]: I1205 14:02:36.865989 4784 generic.go:334] "Generic (PLEG): container finished" podID="0ffcfef3-8ffd-426d-9fd0-543b3dc53d3e" containerID="ee2cfa62d4ebe9c98a876dbce7573fa68e8fc49c6c65400b5fc7a9c86f353c20" exitCode=0 Dec 05 14:02:36 crc kubenswrapper[4784]: I1205 14:02:36.866037 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xqjjn/crc-debug-rhd75" event={"ID":"0ffcfef3-8ffd-426d-9fd0-543b3dc53d3e","Type":"ContainerDied","Data":"ee2cfa62d4ebe9c98a876dbce7573fa68e8fc49c6c65400b5fc7a9c86f353c20"} Dec 05 14:02:36 crc kubenswrapper[4784]: I1205 14:02:36.870097 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rw6b4" event={"ID":"cae07b75-3cce-4e2a-8632-c4af29d56bab","Type":"ContainerStarted","Data":"7db869ce4a0c522cfbac0766abd20158414f070fe1db187429c1672b96bea4d7"} Dec 05 14:02:36 crc kubenswrapper[4784]: I1205 14:02:36.910087 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-rw6b4" podStartSLOduration=3.19153496 podStartE2EDuration="7.910064953s" podCreationTimestamp="2025-12-05 14:02:29 +0000 UTC" firstStartedPulling="2025-12-05 14:02:31.802842262 +0000 UTC m=+5831.222909097" lastFinishedPulling="2025-12-05 14:02:36.521372245 +0000 UTC m=+5835.941439090" observedRunningTime="2025-12-05 14:02:36.901396183 +0000 UTC m=+5836.321462998" watchObservedRunningTime="2025-12-05 14:02:36.910064953 +0000 UTC m=+5836.330131768" Dec 05 14:02:37 crc kubenswrapper[4784]: I1205 14:02:37.016337 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05b00355-b1a0-4dc3-8ca6-c20832d21dce" 
path="/var/lib/kubelet/pods/05b00355-b1a0-4dc3-8ca6-c20832d21dce/volumes" Dec 05 14:02:37 crc kubenswrapper[4784]: I1205 14:02:37.998764 4784 scope.go:117] "RemoveContainer" containerID="142a049a6beda69ace9ae45109b3c32442d01e6b899428e3aae02b4842203f44" Dec 05 14:02:37 crc kubenswrapper[4784]: I1205 14:02:37.999151 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xqjjn/crc-debug-rhd75" Dec 05 14:02:37 crc kubenswrapper[4784]: E1205 14:02:37.999284 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 14:02:38 crc kubenswrapper[4784]: I1205 14:02:38.093236 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0ffcfef3-8ffd-426d-9fd0-543b3dc53d3e-host\") pod \"0ffcfef3-8ffd-426d-9fd0-543b3dc53d3e\" (UID: \"0ffcfef3-8ffd-426d-9fd0-543b3dc53d3e\") " Dec 05 14:02:38 crc kubenswrapper[4784]: I1205 14:02:38.093354 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0ffcfef3-8ffd-426d-9fd0-543b3dc53d3e-host" (OuterVolumeSpecName: "host") pod "0ffcfef3-8ffd-426d-9fd0-543b3dc53d3e" (UID: "0ffcfef3-8ffd-426d-9fd0-543b3dc53d3e"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 14:02:38 crc kubenswrapper[4784]: I1205 14:02:38.093472 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zdjw5\" (UniqueName: \"kubernetes.io/projected/0ffcfef3-8ffd-426d-9fd0-543b3dc53d3e-kube-api-access-zdjw5\") pod \"0ffcfef3-8ffd-426d-9fd0-543b3dc53d3e\" (UID: \"0ffcfef3-8ffd-426d-9fd0-543b3dc53d3e\") " Dec 05 14:02:38 crc kubenswrapper[4784]: I1205 14:02:38.094284 4784 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0ffcfef3-8ffd-426d-9fd0-543b3dc53d3e-host\") on node \"crc\" DevicePath \"\"" Dec 05 14:02:38 crc kubenswrapper[4784]: I1205 14:02:38.100116 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ffcfef3-8ffd-426d-9fd0-543b3dc53d3e-kube-api-access-zdjw5" (OuterVolumeSpecName: "kube-api-access-zdjw5") pod "0ffcfef3-8ffd-426d-9fd0-543b3dc53d3e" (UID: "0ffcfef3-8ffd-426d-9fd0-543b3dc53d3e"). InnerVolumeSpecName "kube-api-access-zdjw5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:02:38 crc kubenswrapper[4784]: I1205 14:02:38.195851 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zdjw5\" (UniqueName: \"kubernetes.io/projected/0ffcfef3-8ffd-426d-9fd0-543b3dc53d3e-kube-api-access-zdjw5\") on node \"crc\" DevicePath \"\"" Dec 05 14:02:38 crc kubenswrapper[4784]: I1205 14:02:38.892171 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xqjjn/crc-debug-rhd75" event={"ID":"0ffcfef3-8ffd-426d-9fd0-543b3dc53d3e","Type":"ContainerDied","Data":"f29fa9ed89525bf858db531e7178349d1faedb01592fe8d48f1664ff7b983b7c"} Dec 05 14:02:38 crc kubenswrapper[4784]: I1205 14:02:38.892319 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f29fa9ed89525bf858db531e7178349d1faedb01592fe8d48f1664ff7b983b7c" Dec 05 14:02:38 crc kubenswrapper[4784]: I1205 14:02:38.892393 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xqjjn/crc-debug-rhd75" Dec 05 14:02:39 crc kubenswrapper[4784]: I1205 14:02:39.165292 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-xqjjn/crc-debug-rhd75"] Dec 05 14:02:39 crc kubenswrapper[4784]: I1205 14:02:39.177490 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-xqjjn/crc-debug-rhd75"] Dec 05 14:02:39 crc kubenswrapper[4784]: I1205 14:02:39.656761 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-rw6b4" Dec 05 14:02:39 crc kubenswrapper[4784]: I1205 14:02:39.656845 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-rw6b4" Dec 05 14:02:39 crc kubenswrapper[4784]: I1205 14:02:39.709285 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-rw6b4" Dec 05 14:02:40 crc kubenswrapper[4784]: I1205 14:02:40.353684 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-xqjjn/crc-debug-cjqg2"] Dec 05 14:02:40 crc kubenswrapper[4784]: E1205 14:02:40.355476 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ffcfef3-8ffd-426d-9fd0-543b3dc53d3e" containerName="container-00" Dec 05 14:02:40 crc kubenswrapper[4784]: I1205 14:02:40.355615 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ffcfef3-8ffd-426d-9fd0-543b3dc53d3e" containerName="container-00" Dec 05 14:02:40 crc kubenswrapper[4784]: I1205 14:02:40.356013 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ffcfef3-8ffd-426d-9fd0-543b3dc53d3e" containerName="container-00" Dec 05 14:02:40 crc kubenswrapper[4784]: I1205 14:02:40.357029 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xqjjn/crc-debug-cjqg2" Dec 05 14:02:40 crc kubenswrapper[4784]: I1205 14:02:40.358763 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-xqjjn"/"default-dockercfg-qrc2j" Dec 05 14:02:40 crc kubenswrapper[4784]: I1205 14:02:40.545418 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q69n6\" (UniqueName: \"kubernetes.io/projected/75ca708c-2cce-4301-9bfc-5c869256aed5-kube-api-access-q69n6\") pod \"crc-debug-cjqg2\" (UID: \"75ca708c-2cce-4301-9bfc-5c869256aed5\") " pod="openshift-must-gather-xqjjn/crc-debug-cjqg2" Dec 05 14:02:40 crc kubenswrapper[4784]: I1205 14:02:40.545965 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/75ca708c-2cce-4301-9bfc-5c869256aed5-host\") pod \"crc-debug-cjqg2\" (UID: \"75ca708c-2cce-4301-9bfc-5c869256aed5\") " pod="openshift-must-gather-xqjjn/crc-debug-cjqg2" Dec 05 14:02:40 crc kubenswrapper[4784]: I1205 14:02:40.647954 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/75ca708c-2cce-4301-9bfc-5c869256aed5-host\") pod \"crc-debug-cjqg2\" (UID: \"75ca708c-2cce-4301-9bfc-5c869256aed5\") " pod="openshift-must-gather-xqjjn/crc-debug-cjqg2" Dec 05 14:02:40 crc kubenswrapper[4784]: I1205 14:02:40.648095 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q69n6\" (UniqueName: \"kubernetes.io/projected/75ca708c-2cce-4301-9bfc-5c869256aed5-kube-api-access-q69n6\") pod \"crc-debug-cjqg2\" (UID: \"75ca708c-2cce-4301-9bfc-5c869256aed5\") " pod="openshift-must-gather-xqjjn/crc-debug-cjqg2" Dec 05 14:02:40 crc kubenswrapper[4784]: I1205 14:02:40.648155 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/75ca708c-2cce-4301-9bfc-5c869256aed5-host\") pod \"crc-debug-cjqg2\" (UID: \"75ca708c-2cce-4301-9bfc-5c869256aed5\") " pod="openshift-must-gather-xqjjn/crc-debug-cjqg2" Dec 05 14:02:40 crc kubenswrapper[4784]: I1205 14:02:40.668628 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q69n6\" (UniqueName: \"kubernetes.io/projected/75ca708c-2cce-4301-9bfc-5c869256aed5-kube-api-access-q69n6\") pod \"crc-debug-cjqg2\" (UID: \"75ca708c-2cce-4301-9bfc-5c869256aed5\") " pod="openshift-must-gather-xqjjn/crc-debug-cjqg2" Dec 05 14:02:40 crc kubenswrapper[4784]: I1205 14:02:40.679289 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xqjjn/crc-debug-cjqg2" Dec 05 14:02:40 crc kubenswrapper[4784]: I1205 14:02:40.911546 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xqjjn/crc-debug-cjqg2" event={"ID":"75ca708c-2cce-4301-9bfc-5c869256aed5","Type":"ContainerStarted","Data":"227d2c0b0bdac406ff1974e55b961f3353f11df3eec1aa5a275a0cc2fb45d72b"} Dec 05 14:02:41 crc kubenswrapper[4784]: I1205 14:02:41.013743 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0ffcfef3-8ffd-426d-9fd0-543b3dc53d3e" path="/var/lib/kubelet/pods/0ffcfef3-8ffd-426d-9fd0-543b3dc53d3e/volumes" Dec 05 14:02:41 crc kubenswrapper[4784]: I1205 14:02:41.922304 4784 generic.go:334] "Generic (PLEG): container finished" podID="75ca708c-2cce-4301-9bfc-5c869256aed5" containerID="736332c4de81ea28950e9cce2cc6b9faeada9ac439208cbc775b793efa2356e1" exitCode=0 Dec 05 14:02:41 crc kubenswrapper[4784]: I1205 14:02:41.922355 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xqjjn/crc-debug-cjqg2" event={"ID":"75ca708c-2cce-4301-9bfc-5c869256aed5","Type":"ContainerDied","Data":"736332c4de81ea28950e9cce2cc6b9faeada9ac439208cbc775b793efa2356e1"} Dec 05 14:02:41 crc kubenswrapper[4784]: I1205 14:02:41.959137 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-xqjjn/crc-debug-cjqg2"] Dec 05 14:02:41 crc kubenswrapper[4784]: I1205 14:02:41.969453 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-xqjjn/crc-debug-cjqg2"] Dec 05 14:02:43 crc kubenswrapper[4784]: I1205 14:02:43.048100 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xqjjn/crc-debug-cjqg2" Dec 05 14:02:43 crc kubenswrapper[4784]: I1205 14:02:43.199614 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/75ca708c-2cce-4301-9bfc-5c869256aed5-host\") pod \"75ca708c-2cce-4301-9bfc-5c869256aed5\" (UID: \"75ca708c-2cce-4301-9bfc-5c869256aed5\") " Dec 05 14:02:43 crc kubenswrapper[4784]: I1205 14:02:43.199705 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/75ca708c-2cce-4301-9bfc-5c869256aed5-host" (OuterVolumeSpecName: "host") pod "75ca708c-2cce-4301-9bfc-5c869256aed5" (UID: "75ca708c-2cce-4301-9bfc-5c869256aed5"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 14:02:43 crc kubenswrapper[4784]: I1205 14:02:43.199720 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q69n6\" (UniqueName: \"kubernetes.io/projected/75ca708c-2cce-4301-9bfc-5c869256aed5-kube-api-access-q69n6\") pod \"75ca708c-2cce-4301-9bfc-5c869256aed5\" (UID: \"75ca708c-2cce-4301-9bfc-5c869256aed5\") " Dec 05 14:02:43 crc kubenswrapper[4784]: I1205 14:02:43.200584 4784 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/75ca708c-2cce-4301-9bfc-5c869256aed5-host\") on node \"crc\" DevicePath \"\"" Dec 05 14:02:43 crc kubenswrapper[4784]: I1205 14:02:43.205232 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/75ca708c-2cce-4301-9bfc-5c869256aed5-kube-api-access-q69n6" (OuterVolumeSpecName: "kube-api-access-q69n6") pod "75ca708c-2cce-4301-9bfc-5c869256aed5" (UID: "75ca708c-2cce-4301-9bfc-5c869256aed5"). InnerVolumeSpecName "kube-api-access-q69n6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:02:43 crc kubenswrapper[4784]: I1205 14:02:43.303843 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q69n6\" (UniqueName: \"kubernetes.io/projected/75ca708c-2cce-4301-9bfc-5c869256aed5-kube-api-access-q69n6\") on node \"crc\" DevicePath \"\"" Dec 05 14:02:43 crc kubenswrapper[4784]: I1205 14:02:43.950946 4784 scope.go:117] "RemoveContainer" containerID="736332c4de81ea28950e9cce2cc6b9faeada9ac439208cbc775b793efa2356e1" Dec 05 14:02:43 crc kubenswrapper[4784]: I1205 14:02:43.951008 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xqjjn/crc-debug-cjqg2" Dec 05 14:02:45 crc kubenswrapper[4784]: I1205 14:02:45.010337 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="75ca708c-2cce-4301-9bfc-5c869256aed5" path="/var/lib/kubelet/pods/75ca708c-2cce-4301-9bfc-5c869256aed5/volumes" Dec 05 14:02:49 crc kubenswrapper[4784]: I1205 14:02:48.999358 4784 scope.go:117] "RemoveContainer" containerID="142a049a6beda69ace9ae45109b3c32442d01e6b899428e3aae02b4842203f44" Dec 05 14:02:49 crc kubenswrapper[4784]: E1205 14:02:49.000539 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 14:02:49 crc kubenswrapper[4784]: I1205 14:02:49.706263 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-rw6b4" Dec 05 14:02:49 crc kubenswrapper[4784]: I1205 14:02:49.757173 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rw6b4"] Dec 05 14:02:50 crc kubenswrapper[4784]: I1205 14:02:50.007482 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-rw6b4" podUID="cae07b75-3cce-4e2a-8632-c4af29d56bab" containerName="registry-server" containerID="cri-o://7db869ce4a0c522cfbac0766abd20158414f070fe1db187429c1672b96bea4d7" gracePeriod=2 Dec 05 14:02:50 crc kubenswrapper[4784]: I1205 14:02:50.642158 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rw6b4" Dec 05 14:02:50 crc kubenswrapper[4784]: I1205 14:02:50.758779 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cae07b75-3cce-4e2a-8632-c4af29d56bab-utilities\") pod \"cae07b75-3cce-4e2a-8632-c4af29d56bab\" (UID: \"cae07b75-3cce-4e2a-8632-c4af29d56bab\") " Dec 05 14:02:50 crc kubenswrapper[4784]: I1205 14:02:50.759047 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cae07b75-3cce-4e2a-8632-c4af29d56bab-catalog-content\") pod \"cae07b75-3cce-4e2a-8632-c4af29d56bab\" (UID: \"cae07b75-3cce-4e2a-8632-c4af29d56bab\") " Dec 05 14:02:50 crc kubenswrapper[4784]: I1205 14:02:50.759109 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x6wbp\" (UniqueName: \"kubernetes.io/projected/cae07b75-3cce-4e2a-8632-c4af29d56bab-kube-api-access-x6wbp\") pod \"cae07b75-3cce-4e2a-8632-c4af29d56bab\" (UID: \"cae07b75-3cce-4e2a-8632-c4af29d56bab\") " Dec 05 14:02:50 crc kubenswrapper[4784]: I1205 14:02:50.760326 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cae07b75-3cce-4e2a-8632-c4af29d56bab-utilities" (OuterVolumeSpecName: "utilities") pod "cae07b75-3cce-4e2a-8632-c4af29d56bab" (UID: "cae07b75-3cce-4e2a-8632-c4af29d56bab"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 14:02:50 crc kubenswrapper[4784]: I1205 14:02:50.779616 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cae07b75-3cce-4e2a-8632-c4af29d56bab-kube-api-access-x6wbp" (OuterVolumeSpecName: "kube-api-access-x6wbp") pod "cae07b75-3cce-4e2a-8632-c4af29d56bab" (UID: "cae07b75-3cce-4e2a-8632-c4af29d56bab"). InnerVolumeSpecName "kube-api-access-x6wbp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:02:50 crc kubenswrapper[4784]: I1205 14:02:50.828807 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cae07b75-3cce-4e2a-8632-c4af29d56bab-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cae07b75-3cce-4e2a-8632-c4af29d56bab" (UID: "cae07b75-3cce-4e2a-8632-c4af29d56bab"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 14:02:50 crc kubenswrapper[4784]: I1205 14:02:50.861140 4784 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cae07b75-3cce-4e2a-8632-c4af29d56bab-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 14:02:50 crc kubenswrapper[4784]: I1205 14:02:50.861179 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x6wbp\" (UniqueName: \"kubernetes.io/projected/cae07b75-3cce-4e2a-8632-c4af29d56bab-kube-api-access-x6wbp\") on node \"crc\" DevicePath \"\"" Dec 05 14:02:50 crc kubenswrapper[4784]: I1205 14:02:50.861236 4784 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cae07b75-3cce-4e2a-8632-c4af29d56bab-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 14:02:51 crc kubenswrapper[4784]: I1205 14:02:51.028977 4784 generic.go:334] "Generic (PLEG): container finished" podID="cae07b75-3cce-4e2a-8632-c4af29d56bab" containerID="7db869ce4a0c522cfbac0766abd20158414f070fe1db187429c1672b96bea4d7" exitCode=0 Dec 05 14:02:51 crc kubenswrapper[4784]: I1205 14:02:51.029026 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rw6b4" event={"ID":"cae07b75-3cce-4e2a-8632-c4af29d56bab","Type":"ContainerDied","Data":"7db869ce4a0c522cfbac0766abd20158414f070fe1db187429c1672b96bea4d7"} Dec 05 14:02:51 crc kubenswrapper[4784]: I1205 14:02:51.029081 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rw6b4" Dec 05 14:02:51 crc kubenswrapper[4784]: I1205 14:02:51.029101 4784 scope.go:117] "RemoveContainer" containerID="7db869ce4a0c522cfbac0766abd20158414f070fe1db187429c1672b96bea4d7" Dec 05 14:02:51 crc kubenswrapper[4784]: I1205 14:02:51.029087 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rw6b4" event={"ID":"cae07b75-3cce-4e2a-8632-c4af29d56bab","Type":"ContainerDied","Data":"7c147023eef00f47a9462f96ac5857ae6317cbb79961f240f874e7644f2888e8"} Dec 05 14:02:51 crc kubenswrapper[4784]: I1205 14:02:51.052919 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rw6b4"] Dec 05 14:02:51 crc kubenswrapper[4784]: I1205 14:02:51.059343 4784 scope.go:117] "RemoveContainer" containerID="e8cb46313105c7849f491cd369426ac733fdd04147f61d01914070500d055975" Dec 05 14:02:51 crc kubenswrapper[4784]: I1205 14:02:51.066421 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-rw6b4"] Dec 05 14:02:51 crc kubenswrapper[4784]: I1205 14:02:51.078062 4784 scope.go:117] "RemoveContainer" containerID="4eee3ba5f4a24c5c934bafdfbac8f76a68cfd9a355c9854c749481b2029f0f3f" Dec 05 14:02:51 crc kubenswrapper[4784]: I1205 14:02:51.148845 4784 scope.go:117] "RemoveContainer" containerID="7db869ce4a0c522cfbac0766abd20158414f070fe1db187429c1672b96bea4d7" Dec 05 14:02:51 crc kubenswrapper[4784]: E1205 14:02:51.151546 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7db869ce4a0c522cfbac0766abd20158414f070fe1db187429c1672b96bea4d7\": container with ID starting with 7db869ce4a0c522cfbac0766abd20158414f070fe1db187429c1672b96bea4d7 not found: ID does not exist" containerID="7db869ce4a0c522cfbac0766abd20158414f070fe1db187429c1672b96bea4d7" Dec 05 14:02:51 crc kubenswrapper[4784]: I1205 14:02:51.151612 
4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7db869ce4a0c522cfbac0766abd20158414f070fe1db187429c1672b96bea4d7"} err="failed to get container status \"7db869ce4a0c522cfbac0766abd20158414f070fe1db187429c1672b96bea4d7\": rpc error: code = NotFound desc = could not find container \"7db869ce4a0c522cfbac0766abd20158414f070fe1db187429c1672b96bea4d7\": container with ID starting with 7db869ce4a0c522cfbac0766abd20158414f070fe1db187429c1672b96bea4d7 not found: ID does not exist" Dec 05 14:02:51 crc kubenswrapper[4784]: I1205 14:02:51.151652 4784 scope.go:117] "RemoveContainer" containerID="e8cb46313105c7849f491cd369426ac733fdd04147f61d01914070500d055975" Dec 05 14:02:51 crc kubenswrapper[4784]: E1205 14:02:51.154293 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e8cb46313105c7849f491cd369426ac733fdd04147f61d01914070500d055975\": container with ID starting with e8cb46313105c7849f491cd369426ac733fdd04147f61d01914070500d055975 not found: ID does not exist" containerID="e8cb46313105c7849f491cd369426ac733fdd04147f61d01914070500d055975" Dec 05 14:02:51 crc kubenswrapper[4784]: I1205 14:02:51.154350 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e8cb46313105c7849f491cd369426ac733fdd04147f61d01914070500d055975"} err="failed to get container status \"e8cb46313105c7849f491cd369426ac733fdd04147f61d01914070500d055975\": rpc error: code = NotFound desc = could not find container \"e8cb46313105c7849f491cd369426ac733fdd04147f61d01914070500d055975\": container with ID starting with e8cb46313105c7849f491cd369426ac733fdd04147f61d01914070500d055975 not found: ID does not exist" Dec 05 14:02:51 crc kubenswrapper[4784]: I1205 14:02:51.154386 4784 scope.go:117] "RemoveContainer" containerID="4eee3ba5f4a24c5c934bafdfbac8f76a68cfd9a355c9854c749481b2029f0f3f" Dec 05 14:02:51 crc kubenswrapper[4784]: E1205 14:02:51.155983 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4eee3ba5f4a24c5c934bafdfbac8f76a68cfd9a355c9854c749481b2029f0f3f\": container with ID starting with 4eee3ba5f4a24c5c934bafdfbac8f76a68cfd9a355c9854c749481b2029f0f3f not found: ID does not exist" containerID="4eee3ba5f4a24c5c934bafdfbac8f76a68cfd9a355c9854c749481b2029f0f3f" Dec 05 14:02:51 crc kubenswrapper[4784]: I1205 14:02:51.156033 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4eee3ba5f4a24c5c934bafdfbac8f76a68cfd9a355c9854c749481b2029f0f3f"} err="failed to get container status \"4eee3ba5f4a24c5c934bafdfbac8f76a68cfd9a355c9854c749481b2029f0f3f\": rpc error: code = NotFound desc = could not find container \"4eee3ba5f4a24c5c934bafdfbac8f76a68cfd9a355c9854c749481b2029f0f3f\": container with ID starting with 4eee3ba5f4a24c5c934bafdfbac8f76a68cfd9a355c9854c749481b2029f0f3f not found: ID does not exist" Dec 05 14:02:53 crc kubenswrapper[4784]: I1205 14:02:53.013847 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cae07b75-3cce-4e2a-8632-c4af29d56bab" path="/var/lib/kubelet/pods/cae07b75-3cce-4e2a-8632-c4af29d56bab/volumes" Dec 05 14:02:59 crc kubenswrapper[4784]: I1205 14:02:59.999056 4784 scope.go:117] "RemoveContainer" containerID="142a049a6beda69ace9ae45109b3c32442d01e6b899428e3aae02b4842203f44" Dec 05 14:03:00 crc kubenswrapper[4784]: E1205 14:02:59.999914 4784 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 14:03:11 crc kubenswrapper[4784]: I1205 14:03:11.006478 4784 scope.go:117] "RemoveContainer" containerID="142a049a6beda69ace9ae45109b3c32442d01e6b899428e3aae02b4842203f44" Dec 05 14:03:11 crc kubenswrapper[4784]: E1205 14:03:11.007265 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 14:03:11 crc kubenswrapper[4784]: I1205 14:03:11.032567 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-7b64cff454-t47d4_20cab8a1-167e-4a61-9de1-dbca99fc6978/barbican-api/0.log" Dec 05 14:03:11 crc kubenswrapper[4784]: I1205 14:03:11.039016 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-7b64cff454-t47d4_20cab8a1-167e-4a61-9de1-dbca99fc6978/barbican-api-log/0.log" Dec 05 14:03:11 crc kubenswrapper[4784]: I1205 14:03:11.283267 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-5866d77f58-dcc8l_d1f830fd-3c91-4985-ac6f-96314a74acc1/barbican-keystone-listener/0.log" Dec 05 14:03:11 crc kubenswrapper[4784]: I1205 14:03:11.290387 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-5866d77f58-dcc8l_d1f830fd-3c91-4985-ac6f-96314a74acc1/barbican-keystone-listener-log/0.log" Dec 05 14:03:11 crc kubenswrapper[4784]: I1205 14:03:11.425679 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-655f48fc8f-k55s9_b7fd6fd1-2f61-44a4-b8b7-9f1c38768db2/barbican-worker/0.log" Dec 05 14:03:11 crc kubenswrapper[4784]: I1205 14:03:11.516552 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-655f48fc8f-k55s9_b7fd6fd1-2f61-44a4-b8b7-9f1c38768db2/barbican-worker-log/0.log" Dec 05 14:03:11 crc kubenswrapper[4784]: I1205 14:03:11.586899 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-kphkn_345a4940-4998-4cbc-bd5e-89bea1eec60b/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 14:03:11 crc kubenswrapper[4784]: I1205 14:03:11.770454 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_435a1cb2-bd53-4b3f-906d-7fc3de9553fb/ceilometer-notification-agent/0.log" Dec 05 14:03:11 crc kubenswrapper[4784]: I1205 14:03:11.790600 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_435a1cb2-bd53-4b3f-906d-7fc3de9553fb/ceilometer-central-agent/0.log" Dec 05 14:03:11 crc kubenswrapper[4784]: I1205 14:03:11.812605 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_435a1cb2-bd53-4b3f-906d-7fc3de9553fb/proxy-httpd/0.log" Dec 05 14:03:11 crc kubenswrapper[4784]: I1205 14:03:11.915072 4784 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ceilometer-0_435a1cb2-bd53-4b3f-906d-7fc3de9553fb/sg-core/0.log" Dec 05 14:03:12 crc kubenswrapper[4784]: I1205 14:03:12.033898 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_630347d3-27c8-4ef8-8bc4-f06ff57474ed/cinder-api-log/0.log" Dec 05 14:03:12 crc kubenswrapper[4784]: I1205 14:03:12.351002 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_9db8a5e9-71b5-49aa-a45d-1361d3a021c9/probe/0.log" Dec 05 14:03:12 crc kubenswrapper[4784]: I1205 14:03:12.568381 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_9db8a5e9-71b5-49aa-a45d-1361d3a021c9/cinder-backup/0.log" Dec 05 14:03:12 crc kubenswrapper[4784]: I1205 14:03:12.590268 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_630347d3-27c8-4ef8-8bc4-f06ff57474ed/cinder-api/0.log" Dec 05 14:03:12 crc kubenswrapper[4784]: I1205 14:03:12.648396 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_7543eb24-2b15-498b-b447-9f1f47fef1f0/cinder-scheduler/0.log" Dec 05 14:03:12 crc kubenswrapper[4784]: I1205 14:03:12.794021 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_7543eb24-2b15-498b-b447-9f1f47fef1f0/probe/0.log" Dec 05 14:03:12 crc kubenswrapper[4784]: I1205 14:03:12.899339 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-nfs-0_3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed/probe/0.log" Dec 05 14:03:12 crc kubenswrapper[4784]: I1205 14:03:12.960163 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-nfs-0_3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed/cinder-volume/0.log" Dec 05 14:03:13 crc kubenswrapper[4784]: I1205 14:03:13.160312 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-nfs-2-0_60971449-2443-4cba-90d2-7d1c6ba8acdd/probe/0.log" Dec 05 14:03:13 crc kubenswrapper[4784]: I1205 14:03:13.246570 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-nfs-2-0_60971449-2443-4cba-90d2-7d1c6ba8acdd/cinder-volume/0.log" Dec 05 14:03:13 crc kubenswrapper[4784]: I1205 14:03:13.333539 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-mh867_face2e9b-424c-4b68-8b2d-8f00b1e79256/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 14:03:13 crc kubenswrapper[4784]: I1205 14:03:13.458748 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-m5gm5_b0194359-b6ce-4590-b835-c81b0c992ca1/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 14:03:13 crc kubenswrapper[4784]: I1205 14:03:13.553806 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5fb487c899-nw4wf_4d1b8599-38bc-4f76-aaa6-4a18929bffba/init/0.log" Dec 05 14:03:13 crc kubenswrapper[4784]: I1205 14:03:13.754044 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5fb487c899-nw4wf_4d1b8599-38bc-4f76-aaa6-4a18929bffba/init/0.log" Dec 05 14:03:13 crc kubenswrapper[4784]: I1205 14:03:13.813110 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-tx7xw_a134d50c-87cd-4225-b873-1c6b1d2a0151/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 14:03:13 crc kubenswrapper[4784]: I1205 
14:03:13.901060 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5fb487c899-nw4wf_4d1b8599-38bc-4f76-aaa6-4a18929bffba/dnsmasq-dns/0.log" Dec 05 14:03:14 crc kubenswrapper[4784]: I1205 14:03:14.031906 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_60863f45-1c7b-4e86-8782-aece4b178edb/glance-httpd/0.log" Dec 05 14:03:14 crc kubenswrapper[4784]: I1205 14:03:14.143327 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_60863f45-1c7b-4e86-8782-aece4b178edb/glance-log/0.log" Dec 05 14:03:14 crc kubenswrapper[4784]: I1205 14:03:14.237557 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_4f0ebe85-0cf4-4cbf-9b72-1561ca313666/glance-httpd/0.log" Dec 05 14:03:14 crc kubenswrapper[4784]: I1205 14:03:14.309244 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_4f0ebe85-0cf4-4cbf-9b72-1561ca313666/glance-log/0.log" Dec 05 14:03:14 crc kubenswrapper[4784]: I1205 14:03:14.427690 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-7cdb6b7d4-mvtql_5eba67d7-3c83-47c9-bdc2-0946f5839efd/horizon/0.log" Dec 05 14:03:14 crc kubenswrapper[4784]: I1205 14:03:14.749595 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5_8021b39a-1235-4fb9-8ef4-ae1ff51e7835/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 14:03:14 crc kubenswrapper[4784]: I1205 14:03:14.981966 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-kphz8_db4f2cc1-d1a2-42af-a45b-04e866b92d97/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 14:03:15 crc kubenswrapper[4784]: I1205 14:03:15.080122 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-7cdb6b7d4-mvtql_5eba67d7-3c83-47c9-bdc2-0946f5839efd/horizon-log/0.log" Dec 05 14:03:15 crc kubenswrapper[4784]: I1205 14:03:15.456391 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29415661-ckq8x_0090d182-b58b-4c0b-83b0-82ce94675e65/keystone-cron/0.log" Dec 05 14:03:15 crc kubenswrapper[4784]: I1205 14:03:15.565723 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29415721-9jftk_22fe4207-5d79-42c8-b6fc-b0a0539d17bf/keystone-cron/0.log" Dec 05 14:03:15 crc kubenswrapper[4784]: I1205 14:03:15.687476 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-6df6c9b849-hzswf_74bc0f27-17f2-4980-8c67-3a980c2e267d/keystone-api/0.log" Dec 05 14:03:15 crc kubenswrapper[4784]: I1205 14:03:15.690078 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_20314f76-fd12-4756-96b9-88485d32d3e0/kube-state-metrics/0.log" Dec 05 14:03:15 crc kubenswrapper[4784]: I1205 14:03:15.801619 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-r4w6d_ba1cfa9d-6665-4a66-a134-28fae26e36a2/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 14:03:16 crc kubenswrapper[4784]: I1205 14:03:16.312666 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts_ab97b4b4-1696-43ea-b462-56bcd34dda98/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 14:03:16 crc 
kubenswrapper[4784]: I1205 14:03:16.392949 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6bc87b8895-m5b7r_c4c57012-5781-4940-9551-6a53e2f9fad3/neutron-httpd/0.log" Dec 05 14:03:16 crc kubenswrapper[4784]: I1205 14:03:16.427364 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6bc87b8895-m5b7r_c4c57012-5781-4940-9551-6a53e2f9fad3/neutron-api/0.log" Dec 05 14:03:16 crc kubenswrapper[4784]: I1205 14:03:16.939457 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_306ca66e-9ffa-49fd-b2ad-1021c24fa070/nova-cell0-conductor-conductor/0.log" Dec 05 14:03:17 crc kubenswrapper[4784]: I1205 14:03:17.307454 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_2f743e41-0208-45ae-940c-104f0c9442ba/nova-cell1-conductor-conductor/0.log" Dec 05 14:03:17 crc kubenswrapper[4784]: I1205 14:03:17.532283 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_8b5aa1c7-bf1d-49a6-ad9a-739c5dac7550/nova-cell1-novncproxy-novncproxy/0.log" Dec 05 14:03:17 crc kubenswrapper[4784]: I1205 14:03:17.844167 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-m6kfz_73ebab61-4062-476d-84bc-1013b097d5ac/nova-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 14:03:17 crc kubenswrapper[4784]: I1205 14:03:17.946941 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_e693ebe6-ec98-4906-9a85-25a5a8a3c871/nova-api-log/0.log" Dec 05 14:03:18 crc kubenswrapper[4784]: I1205 14:03:18.040769 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_e099c7b6-c61f-4426-a17a-ca13ca695a1e/nova-metadata-log/0.log" Dec 05 14:03:18 crc kubenswrapper[4784]: I1205 14:03:18.335266 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_e693ebe6-ec98-4906-9a85-25a5a8a3c871/nova-api-api/0.log" Dec 05 14:03:18 crc kubenswrapper[4784]: I1205 14:03:18.531562 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_62eaeb31-76a0-4f2b-9bbe-b00f25a620e3/mysql-bootstrap/0.log" Dec 05 14:03:18 crc kubenswrapper[4784]: I1205 14:03:18.559341 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_df7dbd5c-eb86-4431-8cdd-59b57dcfc381/nova-scheduler-scheduler/0.log" Dec 05 14:03:18 crc kubenswrapper[4784]: I1205 14:03:18.802771 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_62eaeb31-76a0-4f2b-9bbe-b00f25a620e3/mysql-bootstrap/0.log" Dec 05 14:03:18 crc kubenswrapper[4784]: I1205 14:03:18.813439 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_62eaeb31-76a0-4f2b-9bbe-b00f25a620e3/galera/0.log" Dec 05 14:03:19 crc kubenswrapper[4784]: I1205 14:03:19.031555 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_2142f1ca-e4be-48fc-94b9-12d5f7737366/mysql-bootstrap/0.log" Dec 05 14:03:19 crc kubenswrapper[4784]: I1205 14:03:19.223547 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_2142f1ca-e4be-48fc-94b9-12d5f7737366/mysql-bootstrap/0.log" Dec 05 14:03:19 crc kubenswrapper[4784]: I1205 14:03:19.346363 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_2142f1ca-e4be-48fc-94b9-12d5f7737366/galera/0.log" Dec 05 14:03:19 
crc kubenswrapper[4784]: I1205 14:03:19.455426 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_a90b38e9-d09e-4f72-9d73-85c2226e4049/openstackclient/0.log" Dec 05 14:03:19 crc kubenswrapper[4784]: I1205 14:03:19.557168 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ljc2l_938888bc-6cef-410e-b517-9fdb0c824405/ovn-controller/0.log" Dec 05 14:03:19 crc kubenswrapper[4784]: I1205 14:03:19.730605 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-sjfhv_646c01bd-0f76-4fbc-aae4-9d679cde5796/openstack-network-exporter/0.log" Dec 05 14:03:19 crc kubenswrapper[4784]: I1205 14:03:19.945178 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-6s6n5_8762021a-ee57-4a56-b752-da1d808ca0ff/ovsdb-server-init/0.log" Dec 05 14:03:20 crc kubenswrapper[4784]: I1205 14:03:20.321522 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_e099c7b6-c61f-4426-a17a-ca13ca695a1e/nova-metadata-metadata/0.log" Dec 05 14:03:20 crc kubenswrapper[4784]: I1205 14:03:20.478325 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-6s6n5_8762021a-ee57-4a56-b752-da1d808ca0ff/ovsdb-server-init/0.log" Dec 05 14:03:20 crc kubenswrapper[4784]: I1205 14:03:20.533103 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-6s6n5_8762021a-ee57-4a56-b752-da1d808ca0ff/ovsdb-server/0.log" Dec 05 14:03:20 crc kubenswrapper[4784]: I1205 14:03:20.715339 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-qhm79_8014c4e6-3539-4d7f-95c3-bb37c4a1e08e/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 14:03:20 crc kubenswrapper[4784]: I1205 14:03:20.799435 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_961dcf80-20f3-48f5-818b-2c497ce58e01/openstack-network-exporter/0.log" Dec 05 14:03:20 crc kubenswrapper[4784]: I1205 14:03:20.802280 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-6s6n5_8762021a-ee57-4a56-b752-da1d808ca0ff/ovs-vswitchd/0.log" Dec 05 14:03:20 crc kubenswrapper[4784]: I1205 14:03:20.898240 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_961dcf80-20f3-48f5-818b-2c497ce58e01/ovn-northd/0.log" Dec 05 14:03:21 crc kubenswrapper[4784]: I1205 14:03:21.049673 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_3af95a38-f2ad-44f7-a99d-77d48faa79f8/openstack-network-exporter/0.log" Dec 05 14:03:21 crc kubenswrapper[4784]: I1205 14:03:21.076245 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_3af95a38-f2ad-44f7-a99d-77d48faa79f8/ovsdbserver-nb/0.log" Dec 05 14:03:21 crc kubenswrapper[4784]: I1205 14:03:21.256441 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_de4ea412-229e-4e53-97ff-86a923c47aac/openstack-network-exporter/0.log" Dec 05 14:03:21 crc kubenswrapper[4784]: I1205 14:03:21.277049 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_de4ea412-229e-4e53-97ff-86a923c47aac/ovsdbserver-sb/0.log" Dec 05 14:03:21 crc kubenswrapper[4784]: I1205 14:03:21.668465 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-59c9dd888d-55zdv_e09b58a6-8baa-4c70-92dc-f54061239d1b/placement-api/0.log" Dec 05 
14:03:21 crc kubenswrapper[4784]: I1205 14:03:21.668791 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_f331c141-708a-4f4a-b0fa-e2cfcb1a7bed/init-config-reloader/0.log" Dec 05 14:03:21 crc kubenswrapper[4784]: I1205 14:03:21.709559 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-59c9dd888d-55zdv_e09b58a6-8baa-4c70-92dc-f54061239d1b/placement-log/0.log" Dec 05 14:03:21 crc kubenswrapper[4784]: I1205 14:03:21.915940 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_f331c141-708a-4f4a-b0fa-e2cfcb1a7bed/init-config-reloader/0.log" Dec 05 14:03:21 crc kubenswrapper[4784]: I1205 14:03:21.920473 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_f331c141-708a-4f4a-b0fa-e2cfcb1a7bed/prometheus/0.log" Dec 05 14:03:21 crc kubenswrapper[4784]: I1205 14:03:21.958628 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_f331c141-708a-4f4a-b0fa-e2cfcb1a7bed/config-reloader/0.log" Dec 05 14:03:21 crc kubenswrapper[4784]: I1205 14:03:21.993324 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_f331c141-708a-4f4a-b0fa-e2cfcb1a7bed/thanos-sidecar/0.log" Dec 05 14:03:22 crc kubenswrapper[4784]: I1205 14:03:22.146052 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_94f3bf83-4b17-4dbc-aed9-b0541983c0b8/setup-container/0.log" Dec 05 14:03:22 crc kubenswrapper[4784]: I1205 14:03:22.367548 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_94f3bf83-4b17-4dbc-aed9-b0541983c0b8/setup-container/0.log" Dec 05 14:03:22 crc kubenswrapper[4784]: I1205 14:03:22.368950 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_94f3bf83-4b17-4dbc-aed9-b0541983c0b8/rabbitmq/0.log" Dec 05 14:03:22 crc kubenswrapper[4784]: I1205 14:03:22.488264 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-notifications-server-0_0a051f14-c8d2-4d57-95a9-9be7c46f9031/setup-container/0.log" Dec 05 14:03:22 crc kubenswrapper[4784]: I1205 14:03:22.675453 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-notifications-server-0_0a051f14-c8d2-4d57-95a9-9be7c46f9031/setup-container/0.log" Dec 05 14:03:22 crc kubenswrapper[4784]: I1205 14:03:22.697436 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-notifications-server-0_0a051f14-c8d2-4d57-95a9-9be7c46f9031/rabbitmq/0.log" Dec 05 14:03:22 crc kubenswrapper[4784]: I1205 14:03:22.779415 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_1a3bb70f-0aad-4f14-809e-1f39b78c97b8/setup-container/0.log" Dec 05 14:03:22 crc kubenswrapper[4784]: I1205 14:03:22.985583 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_1a3bb70f-0aad-4f14-809e-1f39b78c97b8/setup-container/0.log" Dec 05 14:03:23 crc kubenswrapper[4784]: I1205 14:03:23.019453 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_1a3bb70f-0aad-4f14-809e-1f39b78c97b8/rabbitmq/0.log" Dec 05 14:03:23 crc kubenswrapper[4784]: I1205 14:03:23.055446 4784 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-dbnnx_5f5f986e-311c-41da-aae4-18d6f3520749/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 14:03:23 crc kubenswrapper[4784]: I1205 14:03:23.243764 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-djr9k_e26d5696-c749-46ed-9f75-f07d0c46c076/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 14:03:23 crc kubenswrapper[4784]: I1205 14:03:23.346697 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-dmmxx_ced78d37-0ef6-4a75-903d-7db8946f38f4/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 14:03:23 crc kubenswrapper[4784]: I1205 14:03:23.504712 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-rk64x_238cfc14-62ab-498c-acc8-ec79cea43fa8/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 14:03:23 crc kubenswrapper[4784]: I1205 14:03:23.561651 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-wzblc_304b4ee8-7619-47a1-970d-5fbeb6c24e96/ssh-known-hosts-edpm-deployment/0.log" Dec 05 14:03:23 crc kubenswrapper[4784]: I1205 14:03:23.805176 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-664869ddc-d4x9g_dbdfc62e-030d-47fb-bcd5-ea38da412eb6/proxy-server/0.log" Dec 05 14:03:24 crc kubenswrapper[4784]: I1205 14:03:24.002783 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-5js9g_de71f05a-e844-4d80-bd5b-2e4169a624c4/swift-ring-rebalance/0.log" Dec 05 14:03:24 crc kubenswrapper[4784]: I1205 14:03:24.071581 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-664869ddc-d4x9g_dbdfc62e-030d-47fb-bcd5-ea38da412eb6/proxy-httpd/0.log" Dec 05 14:03:24 crc kubenswrapper[4784]: I1205 14:03:24.201954 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f34e93a8-02d9-44ef-a18e-13ce24c3f9a6/account-auditor/0.log" Dec 05 14:03:24 crc kubenswrapper[4784]: I1205 14:03:24.280287 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f34e93a8-02d9-44ef-a18e-13ce24c3f9a6/account-reaper/0.log" Dec 05 14:03:24 crc kubenswrapper[4784]: I1205 14:03:24.351080 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f34e93a8-02d9-44ef-a18e-13ce24c3f9a6/account-server/0.log" Dec 05 14:03:24 crc kubenswrapper[4784]: I1205 14:03:24.391513 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f34e93a8-02d9-44ef-a18e-13ce24c3f9a6/account-replicator/0.log" Dec 05 14:03:24 crc kubenswrapper[4784]: I1205 14:03:24.424298 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f34e93a8-02d9-44ef-a18e-13ce24c3f9a6/container-auditor/0.log" Dec 05 14:03:24 crc kubenswrapper[4784]: I1205 14:03:24.562101 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f34e93a8-02d9-44ef-a18e-13ce24c3f9a6/container-replicator/0.log" Dec 05 14:03:24 crc kubenswrapper[4784]: I1205 14:03:24.782668 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f34e93a8-02d9-44ef-a18e-13ce24c3f9a6/container-server/0.log" Dec 05 14:03:24 crc kubenswrapper[4784]: I1205 14:03:24.802616 4784 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_f34e93a8-02d9-44ef-a18e-13ce24c3f9a6/container-updater/0.log" Dec 05 14:03:24 crc kubenswrapper[4784]: I1205 14:03:24.900804 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f34e93a8-02d9-44ef-a18e-13ce24c3f9a6/object-auditor/0.log" Dec 05 14:03:24 crc kubenswrapper[4784]: I1205 14:03:24.934420 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f34e93a8-02d9-44ef-a18e-13ce24c3f9a6/object-expirer/0.log" Dec 05 14:03:24 crc kubenswrapper[4784]: I1205 14:03:24.998612 4784 scope.go:117] "RemoveContainer" containerID="142a049a6beda69ace9ae45109b3c32442d01e6b899428e3aae02b4842203f44" Dec 05 14:03:24 crc kubenswrapper[4784]: E1205 14:03:24.999062 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 14:03:25 crc kubenswrapper[4784]: I1205 14:03:25.024079 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f34e93a8-02d9-44ef-a18e-13ce24c3f9a6/object-replicator/0.log" Dec 05 14:03:25 crc kubenswrapper[4784]: I1205 14:03:25.127087 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f34e93a8-02d9-44ef-a18e-13ce24c3f9a6/rsync/0.log" Dec 05 14:03:25 crc kubenswrapper[4784]: I1205 14:03:25.127702 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f34e93a8-02d9-44ef-a18e-13ce24c3f9a6/object-server/0.log" Dec 05 14:03:25 crc kubenswrapper[4784]: I1205 14:03:25.167714 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f34e93a8-02d9-44ef-a18e-13ce24c3f9a6/object-updater/0.log" Dec 05 14:03:25 crc kubenswrapper[4784]: I1205 14:03:25.223074 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f34e93a8-02d9-44ef-a18e-13ce24c3f9a6/swift-recon-cron/0.log" Dec 05 14:03:25 crc kubenswrapper[4784]: I1205 14:03:25.458699 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2_3c4b4608-406d-431c-a042-bd54eb2643f9/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 14:03:25 crc kubenswrapper[4784]: I1205 14:03:25.466237 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_9dc746ad-99ec-4a42-8c05-3c45ece46906/tempest-tests-tempest-tests-runner/0.log" Dec 05 14:03:25 crc kubenswrapper[4784]: I1205 14:03:25.723938 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_a313521b-ac34-4e94-83a4-401c7e1acbbe/test-operator-logs-container/0.log" Dec 05 14:03:25 crc kubenswrapper[4784]: I1205 14:03:25.757847 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-xdxzl_c4647929-264c-4fe3-b2ee-f543c25a50d0/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 14:03:26 crc kubenswrapper[4784]: I1205 14:03:26.781954 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-applier-0_e88e0cbd-da67-4123-97dd-6840f902d9f1/watcher-applier/0.log" Dec 05 
14:03:27 crc kubenswrapper[4784]: I1205 14:03:27.530570 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-api-0_91f043b4-34b6-413d-b8d2-25a247639c63/watcher-api-log/0.log" Dec 05 14:03:30 crc kubenswrapper[4784]: I1205 14:03:30.700255 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-decision-engine-0_7dd4bdfd-f163-412e-81a1-ee0d8a2c6aa1/watcher-decision-engine/0.log" Dec 05 14:03:31 crc kubenswrapper[4784]: I1205 14:03:31.925092 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_a93486d9-05bf-44e5-9991-5ca89f117938/memcached/0.log" Dec 05 14:03:31 crc kubenswrapper[4784]: I1205 14:03:31.964217 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-api-0_91f043b4-34b6-413d-b8d2-25a247639c63/watcher-api/0.log" Dec 05 14:03:37 crc kubenswrapper[4784]: I1205 14:03:37.999378 4784 scope.go:117] "RemoveContainer" containerID="142a049a6beda69ace9ae45109b3c32442d01e6b899428e3aae02b4842203f44" Dec 05 14:03:38 crc kubenswrapper[4784]: E1205 14:03:38.000301 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 14:03:51 crc kubenswrapper[4784]: I1205 14:03:51.008092 4784 scope.go:117] "RemoveContainer" containerID="142a049a6beda69ace9ae45109b3c32442d01e6b899428e3aae02b4842203f44" Dec 05 14:03:51 crc kubenswrapper[4784]: E1205 14:03:51.009116 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 14:03:52 crc kubenswrapper[4784]: I1205 14:03:52.319674 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-8ct4c_5434c275-5acc-4ffe-94ff-1cd9440300b0/kube-rbac-proxy/0.log" Dec 05 14:03:52 crc kubenswrapper[4784]: I1205 14:03:52.352520 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-8ct4c_5434c275-5acc-4ffe-94ff-1cd9440300b0/manager/0.log" Dec 05 14:03:52 crc kubenswrapper[4784]: I1205 14:03:52.604160 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-grj7k_17a1e99d-2e27-47df-93be-afbb5224152b/kube-rbac-proxy/0.log" Dec 05 14:03:52 crc kubenswrapper[4784]: I1205 14:03:52.617097 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-grj7k_17a1e99d-2e27-47df-93be-afbb5224152b/manager/0.log" Dec 05 14:03:52 crc kubenswrapper[4784]: I1205 14:03:52.768523 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-g4cvf_cae1438b-c8fd-4660-8843-f41bca4b1e15/kube-rbac-proxy/0.log" Dec 05 14:03:52 crc kubenswrapper[4784]: I1205 14:03:52.810499 4784 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-g4cvf_cae1438b-c8fd-4660-8843-f41bca4b1e15/manager/0.log" Dec 05 14:03:52 crc kubenswrapper[4784]: I1205 14:03:52.840353 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_f6f25c485af4fc88bfa2645e771681c7222de14af05b8289cbd0e3dd6a2hhj6_01816a41-d9a5-4b78-b5b5-553a33adb5d9/util/0.log" Dec 05 14:03:52 crc kubenswrapper[4784]: I1205 14:03:52.991173 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_f6f25c485af4fc88bfa2645e771681c7222de14af05b8289cbd0e3dd6a2hhj6_01816a41-d9a5-4b78-b5b5-553a33adb5d9/util/0.log" Dec 05 14:03:52 crc kubenswrapper[4784]: I1205 14:03:52.999533 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_f6f25c485af4fc88bfa2645e771681c7222de14af05b8289cbd0e3dd6a2hhj6_01816a41-d9a5-4b78-b5b5-553a33adb5d9/pull/0.log" Dec 05 14:03:53 crc kubenswrapper[4784]: I1205 14:03:53.007742 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_f6f25c485af4fc88bfa2645e771681c7222de14af05b8289cbd0e3dd6a2hhj6_01816a41-d9a5-4b78-b5b5-553a33adb5d9/pull/0.log" Dec 05 14:03:53 crc kubenswrapper[4784]: I1205 14:03:53.192846 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_f6f25c485af4fc88bfa2645e771681c7222de14af05b8289cbd0e3dd6a2hhj6_01816a41-d9a5-4b78-b5b5-553a33adb5d9/pull/0.log" Dec 05 14:03:53 crc kubenswrapper[4784]: I1205 14:03:53.211756 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_f6f25c485af4fc88bfa2645e771681c7222de14af05b8289cbd0e3dd6a2hhj6_01816a41-d9a5-4b78-b5b5-553a33adb5d9/util/0.log" Dec 05 14:03:53 crc kubenswrapper[4784]: I1205 14:03:53.229336 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_f6f25c485af4fc88bfa2645e771681c7222de14af05b8289cbd0e3dd6a2hhj6_01816a41-d9a5-4b78-b5b5-553a33adb5d9/extract/0.log" Dec 05 14:03:53 crc kubenswrapper[4784]: I1205 14:03:53.371984 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-tkgxb_bfa95d2c-7e0c-4a2e-8942-03eb8dfddbd5/kube-rbac-proxy/0.log" Dec 05 14:03:53 crc kubenswrapper[4784]: I1205 14:03:53.456158 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-tkgxb_bfa95d2c-7e0c-4a2e-8942-03eb8dfddbd5/manager/0.log" Dec 05 14:03:53 crc kubenswrapper[4784]: I1205 14:03:53.476350 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-lw4zg_34392862-6b0a-4e19-8702-d685378817b1/kube-rbac-proxy/0.log" Dec 05 14:03:53 crc kubenswrapper[4784]: I1205 14:03:53.598069 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-lw4zg_34392862-6b0a-4e19-8702-d685378817b1/manager/0.log" Dec 05 14:03:53 crc kubenswrapper[4784]: I1205 14:03:53.629863 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-bk9hd_d14c7f23-4235-4257-a178-6b90aa4cf3b4/kube-rbac-proxy/0.log" Dec 05 14:03:53 crc kubenswrapper[4784]: I1205 14:03:53.662799 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-bk9hd_d14c7f23-4235-4257-a178-6b90aa4cf3b4/manager/0.log" Dec 05 14:03:53 crc 
kubenswrapper[4784]: I1205 14:03:53.836701 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-6n77l_6c20830c-fef0-4691-9505-5d0c3726ca11/kube-rbac-proxy/0.log" Dec 05 14:03:54 crc kubenswrapper[4784]: I1205 14:03:54.034114 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-fhllr_bd95d9d9-a3b2-4f91-94f1-a60041b5b640/manager/0.log" Dec 05 14:03:54 crc kubenswrapper[4784]: I1205 14:03:54.066152 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-fhllr_bd95d9d9-a3b2-4f91-94f1-a60041b5b640/kube-rbac-proxy/0.log" Dec 05 14:03:54 crc kubenswrapper[4784]: I1205 14:03:54.097485 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-6n77l_6c20830c-fef0-4691-9505-5d0c3726ca11/manager/0.log" Dec 05 14:03:54 crc kubenswrapper[4784]: I1205 14:03:54.257713 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-bstnz_253f2712-fbf0-476b-8ba3-387f7811e4f7/kube-rbac-proxy/0.log" Dec 05 14:03:54 crc kubenswrapper[4784]: I1205 14:03:54.370111 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-bstnz_253f2712-fbf0-476b-8ba3-387f7811e4f7/manager/0.log" Dec 05 14:03:54 crc kubenswrapper[4784]: I1205 14:03:54.430754 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-fkjcs_70f6568a-d588-4d71-8e38-def379ac95cf/kube-rbac-proxy/0.log" Dec 05 14:03:54 crc kubenswrapper[4784]: I1205 14:03:54.461337 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-fkjcs_70f6568a-d588-4d71-8e38-def379ac95cf/manager/0.log" Dec 05 14:03:54 crc kubenswrapper[4784]: I1205 14:03:54.553253 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-5z5pm_6200dbb3-7166-4fa0-925c-fe6155de2927/kube-rbac-proxy/0.log" Dec 05 14:03:54 crc kubenswrapper[4784]: I1205 14:03:54.634531 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-5z5pm_6200dbb3-7166-4fa0-925c-fe6155de2927/manager/0.log" Dec 05 14:03:54 crc kubenswrapper[4784]: I1205 14:03:54.722575 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-n6sh9_d32fc3d7-6f1d-4f5c-8f70-39a417849b13/kube-rbac-proxy/0.log" Dec 05 14:03:54 crc kubenswrapper[4784]: I1205 14:03:54.799834 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-n6sh9_d32fc3d7-6f1d-4f5c-8f70-39a417849b13/manager/0.log" Dec 05 14:03:54 crc kubenswrapper[4784]: I1205 14:03:54.907386 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-prd2m_af5e1f7a-185c-402f-80b7-fb6c66084d0f/kube-rbac-proxy/0.log" Dec 05 14:03:54 crc kubenswrapper[4784]: I1205 14:03:54.983410 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-prd2m_af5e1f7a-185c-402f-80b7-fb6c66084d0f/manager/0.log" 
Dec 05 14:03:55 crc kubenswrapper[4784]: I1205 14:03:55.112859 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-llgbj_52133bea-24d3-440b-880d-67a3131c52db/kube-rbac-proxy/0.log" Dec 05 14:03:55 crc kubenswrapper[4784]: I1205 14:03:55.167470 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-llgbj_52133bea-24d3-440b-880d-67a3131c52db/manager/0.log" Dec 05 14:03:55 crc kubenswrapper[4784]: I1205 14:03:55.303291 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4646jk_d2899908-ecd6-4e04-932d-f26909c0f547/kube-rbac-proxy/0.log" Dec 05 14:03:55 crc kubenswrapper[4784]: I1205 14:03:55.325533 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4646jk_d2899908-ecd6-4e04-932d-f26909c0f547/manager/0.log" Dec 05 14:03:55 crc kubenswrapper[4784]: I1205 14:03:55.769822 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-64f95d469-ftlj7_96560d18-563e-4929-891e-4fb7c9a88619/operator/0.log" Dec 05 14:03:55 crc kubenswrapper[4784]: I1205 14:03:55.829847 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-mzzgx_40db50d1-6180-45f5-9774-7ed6b6dbf490/registry-server/0.log" Dec 05 14:03:55 crc kubenswrapper[4784]: I1205 14:03:55.984232 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-2jxh6_2e6a7f03-9a79-4c8f-8dfd-c1f4e6e3c2ad/kube-rbac-proxy/0.log" Dec 05 14:03:56 crc kubenswrapper[4784]: I1205 14:03:56.139546 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-2jxh6_2e6a7f03-9a79-4c8f-8dfd-c1f4e6e3c2ad/manager/0.log" Dec 05 14:03:56 crc kubenswrapper[4784]: I1205 14:03:56.193066 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-fs6fj_c31c0c1f-afa0-4ba8-a638-d27370864b63/kube-rbac-proxy/0.log" Dec 05 14:03:56 crc kubenswrapper[4784]: I1205 14:03:56.255407 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-fs6fj_c31c0c1f-afa0-4ba8-a638-d27370864b63/manager/0.log" Dec 05 14:03:56 crc kubenswrapper[4784]: I1205 14:03:56.510612 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-mnptg_ce7167d9-e7f3-428e-bbcb-6879014ec908/operator/0.log" Dec 05 14:03:56 crc kubenswrapper[4784]: I1205 14:03:56.539460 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-2dt8t_d725fa2f-4d6e-47b2-aa2d-1757fcef4ad5/kube-rbac-proxy/0.log" Dec 05 14:03:56 crc kubenswrapper[4784]: I1205 14:03:56.674868 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-2dt8t_d725fa2f-4d6e-47b2-aa2d-1757fcef4ad5/manager/0.log" Dec 05 14:03:56 crc kubenswrapper[4784]: I1205 14:03:56.761795 4784 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-tc6cr_43833100-b1fd-45fd-b772-9d0ee036c4ce/kube-rbac-proxy/0.log" Dec 05 14:03:56 crc kubenswrapper[4784]: I1205 14:03:56.918503 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-j9wr9_2b3caeee-8e0e-4a20-9cea-f9f668e2a76f/kube-rbac-proxy/0.log" Dec 05 14:03:57 crc kubenswrapper[4784]: I1205 14:03:57.000224 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-j9wr9_2b3caeee-8e0e-4a20-9cea-f9f668e2a76f/manager/0.log" Dec 05 14:03:57 crc kubenswrapper[4784]: I1205 14:03:57.066853 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-tc6cr_43833100-b1fd-45fd-b772-9d0ee036c4ce/manager/0.log" Dec 05 14:03:57 crc kubenswrapper[4784]: I1205 14:03:57.099232 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-6f87cfd46c-qltwb_0a262894-4e21-4fe3-b216-b135bfb56d5b/manager/0.log" Dec 05 14:03:57 crc kubenswrapper[4784]: I1205 14:03:57.153245 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-7b48476889-fdjfg_19094ce3-8926-4668-87b9-db8aac572e80/kube-rbac-proxy/0.log" Dec 05 14:03:57 crc kubenswrapper[4784]: I1205 14:03:57.250294 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-7b48476889-fdjfg_19094ce3-8926-4668-87b9-db8aac572e80/manager/0.log" Dec 05 14:04:05 crc kubenswrapper[4784]: I1205 14:04:05.998654 4784 scope.go:117] "RemoveContainer" containerID="142a049a6beda69ace9ae45109b3c32442d01e6b899428e3aae02b4842203f44" Dec 05 14:04:06 crc kubenswrapper[4784]: E1205 14:04:05.999629 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 14:04:14 crc kubenswrapper[4784]: I1205 14:04:14.771395 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-shp28_4d076d7f-77aa-4e21-9189-80c39bc6147d/control-plane-machine-set-operator/0.log" Dec 05 14:04:14 crc kubenswrapper[4784]: I1205 14:04:14.884302 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-kjvqd_1b35d831-6af3-41e4-a111-ebfb9fefb029/kube-rbac-proxy/0.log" Dec 05 14:04:14 crc kubenswrapper[4784]: I1205 14:04:14.978160 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-kjvqd_1b35d831-6af3-41e4-a111-ebfb9fefb029/machine-api-operator/0.log" Dec 05 14:04:16 crc kubenswrapper[4784]: I1205 14:04:16.999521 4784 scope.go:117] "RemoveContainer" containerID="142a049a6beda69ace9ae45109b3c32442d01e6b899428e3aae02b4842203f44" Dec 05 14:04:17 crc kubenswrapper[4784]: E1205 14:04:17.000103 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 14:04:26 crc kubenswrapper[4784]: I1205 14:04:26.748788 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-5vsrc_7470ff9d-0206-41d0-b96c-b6618595be7a/cert-manager-controller/0.log" Dec 05 14:04:26 crc kubenswrapper[4784]: I1205 14:04:26.826820 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-6fms9_90561a13-c4ba-4973-9e21-c96cbea6a0b2/cert-manager-cainjector/0.log" Dec 05 14:04:26 crc kubenswrapper[4784]: I1205 14:04:26.881444 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-8brl6_4ae09fde-6000-4f2c-b9bf-ed200fcd83e5/cert-manager-webhook/0.log" Dec 05 14:04:28 crc kubenswrapper[4784]: I1205 14:04:27.999429 4784 scope.go:117] "RemoveContainer" containerID="142a049a6beda69ace9ae45109b3c32442d01e6b899428e3aae02b4842203f44" Dec 05 14:04:28 crc kubenswrapper[4784]: E1205 14:04:27.999763 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 14:04:38 crc kubenswrapper[4784]: I1205 14:04:38.387120 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-z58f9_c4e18387-6306-4f1c-8dd2-30cf9859dc6e/nmstate-console-plugin/0.log" Dec 05 14:04:38 crc kubenswrapper[4784]: I1205 14:04:38.575346 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-c9l7g_c005bc77-26a9-4402-abb3-8c16e17afb69/nmstate-handler/0.log" Dec 05 14:04:38 crc kubenswrapper[4784]: I1205 14:04:38.578698 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-96mtg_8a3d6fb3-6f9e-40b5-8de6-30f0588df3db/kube-rbac-proxy/0.log" Dec 05 14:04:38 crc kubenswrapper[4784]: I1205 14:04:38.642015 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-96mtg_8a3d6fb3-6f9e-40b5-8de6-30f0588df3db/nmstate-metrics/0.log" Dec 05 14:04:38 crc kubenswrapper[4784]: I1205 14:04:38.741256 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-5zl2c_200a7d52-d7cb-4b5e-91a8-d03a2f181b01/nmstate-operator/0.log" Dec 05 14:04:38 crc kubenswrapper[4784]: I1205 14:04:38.834615 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-skw5g_081a21fa-325e-4018-9ce0-abc2bb1899ec/nmstate-webhook/0.log" Dec 05 14:04:42 crc kubenswrapper[4784]: I1205 14:04:42.998645 4784 scope.go:117] "RemoveContainer" containerID="142a049a6beda69ace9ae45109b3c32442d01e6b899428e3aae02b4842203f44" Dec 05 14:04:43 crc kubenswrapper[4784]: E1205 14:04:42.999479 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 14:04:53 crc kubenswrapper[4784]: I1205 14:04:53.411611 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-jgfs9_6bb837d7-0a54-4a2c-a943-70838b7b3d58/kube-rbac-proxy/0.log" Dec 05 14:04:53 crc kubenswrapper[4784]: I1205 14:04:53.584892 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-jgfs9_6bb837d7-0a54-4a2c-a943-70838b7b3d58/controller/0.log" Dec 05 14:04:53 crc kubenswrapper[4784]: I1205 14:04:53.615788 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-956ff_a65c75b7-3183-4839-a70e-d16e4776e89d/frr-k8s-webhook-server/0.log" Dec 05 14:04:53 crc kubenswrapper[4784]: I1205 14:04:53.791222 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xjbhc_45594d33-f8da-48e8-b9c2-d60c96a98f64/cp-frr-files/0.log" Dec 05 14:04:54 crc kubenswrapper[4784]: I1205 14:04:54.003535 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xjbhc_45594d33-f8da-48e8-b9c2-d60c96a98f64/cp-reloader/0.log" Dec 05 14:04:54 crc kubenswrapper[4784]: I1205 14:04:54.025845 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xjbhc_45594d33-f8da-48e8-b9c2-d60c96a98f64/cp-reloader/0.log" Dec 05 14:04:54 crc kubenswrapper[4784]: I1205 14:04:54.028520 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xjbhc_45594d33-f8da-48e8-b9c2-d60c96a98f64/cp-frr-files/0.log" Dec 05 14:04:54 crc kubenswrapper[4784]: I1205 14:04:54.072906 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xjbhc_45594d33-f8da-48e8-b9c2-d60c96a98f64/cp-metrics/0.log" Dec 05 14:04:54 crc kubenswrapper[4784]: I1205 14:04:54.221534 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xjbhc_45594d33-f8da-48e8-b9c2-d60c96a98f64/cp-frr-files/0.log" Dec 05 14:04:54 crc kubenswrapper[4784]: I1205 14:04:54.253170 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xjbhc_45594d33-f8da-48e8-b9c2-d60c96a98f64/cp-metrics/0.log" Dec 05 14:04:54 crc kubenswrapper[4784]: I1205 14:04:54.279535 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xjbhc_45594d33-f8da-48e8-b9c2-d60c96a98f64/cp-reloader/0.log" Dec 05 14:04:54 crc kubenswrapper[4784]: I1205 14:04:54.288015 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xjbhc_45594d33-f8da-48e8-b9c2-d60c96a98f64/cp-metrics/0.log" Dec 05 14:04:54 crc kubenswrapper[4784]: I1205 14:04:54.410295 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xjbhc_45594d33-f8da-48e8-b9c2-d60c96a98f64/cp-frr-files/0.log" Dec 05 14:04:54 crc kubenswrapper[4784]: I1205 14:04:54.428692 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xjbhc_45594d33-f8da-48e8-b9c2-d60c96a98f64/cp-reloader/0.log" Dec 05 14:04:54 crc kubenswrapper[4784]: I1205 14:04:54.459732 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xjbhc_45594d33-f8da-48e8-b9c2-d60c96a98f64/cp-metrics/0.log" Dec 05 14:04:54 crc kubenswrapper[4784]: I1205 14:04:54.518475 4784 log.go:25] "Finished 
parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xjbhc_45594d33-f8da-48e8-b9c2-d60c96a98f64/controller/0.log" Dec 05 14:04:54 crc kubenswrapper[4784]: I1205 14:04:54.628249 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xjbhc_45594d33-f8da-48e8-b9c2-d60c96a98f64/frr-metrics/0.log" Dec 05 14:04:54 crc kubenswrapper[4784]: I1205 14:04:54.676729 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xjbhc_45594d33-f8da-48e8-b9c2-d60c96a98f64/kube-rbac-proxy/0.log" Dec 05 14:04:54 crc kubenswrapper[4784]: I1205 14:04:54.721432 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xjbhc_45594d33-f8da-48e8-b9c2-d60c96a98f64/kube-rbac-proxy-frr/0.log" Dec 05 14:04:54 crc kubenswrapper[4784]: I1205 14:04:54.854651 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xjbhc_45594d33-f8da-48e8-b9c2-d60c96a98f64/reloader/0.log" Dec 05 14:04:54 crc kubenswrapper[4784]: I1205 14:04:54.993926 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-54655dd747-gqpqv_c5573d49-4a27-4dbb-ba09-0a6a3306e365/manager/0.log" Dec 05 14:04:54 crc kubenswrapper[4784]: I1205 14:04:54.998715 4784 scope.go:117] "RemoveContainer" containerID="142a049a6beda69ace9ae45109b3c32442d01e6b899428e3aae02b4842203f44" Dec 05 14:04:54 crc kubenswrapper[4784]: E1205 14:04:54.998974 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 14:04:55 crc kubenswrapper[4784]: I1205 14:04:55.198093 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-849874cf66-slct4_27feea19-4a55-4d86-874a-60b62859a65c/webhook-server/0.log" Dec 05 14:04:55 crc kubenswrapper[4784]: I1205 14:04:55.331218 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-cffk7_f4d31217-a7d3-490f-8bba-c9d8ca4c47ca/kube-rbac-proxy/0.log" Dec 05 14:04:55 crc kubenswrapper[4784]: I1205 14:04:55.926749 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-cffk7_f4d31217-a7d3-490f-8bba-c9d8ca4c47ca/speaker/0.log" Dec 05 14:04:56 crc kubenswrapper[4784]: I1205 14:04:56.216127 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xjbhc_45594d33-f8da-48e8-b9c2-d60c96a98f64/frr/0.log" Dec 05 14:05:07 crc kubenswrapper[4784]: I1205 14:05:07.966357 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fr965w_1efb6a43-d637-4f19-8514-ee271c6aea44/util/0.log" Dec 05 14:05:08 crc kubenswrapper[4784]: I1205 14:05:08.086251 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fr965w_1efb6a43-d637-4f19-8514-ee271c6aea44/util/0.log" Dec 05 14:05:08 crc kubenswrapper[4784]: I1205 14:05:08.116715 4784 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fr965w_1efb6a43-d637-4f19-8514-ee271c6aea44/pull/0.log" Dec 05 14:05:08 crc kubenswrapper[4784]: I1205 14:05:08.161426 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fr965w_1efb6a43-d637-4f19-8514-ee271c6aea44/pull/0.log" Dec 05 14:05:08 crc kubenswrapper[4784]: I1205 14:05:08.313225 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fr965w_1efb6a43-d637-4f19-8514-ee271c6aea44/pull/0.log" Dec 05 14:05:08 crc kubenswrapper[4784]: I1205 14:05:08.322577 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fr965w_1efb6a43-d637-4f19-8514-ee271c6aea44/extract/0.log" Dec 05 14:05:08 crc kubenswrapper[4784]: I1205 14:05:08.324866 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fr965w_1efb6a43-d637-4f19-8514-ee271c6aea44/util/0.log" Dec 05 14:05:08 crc kubenswrapper[4784]: I1205 14:05:08.465940 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104qz4c_06a04def-1da6-41fc-9aa1-9a6d5a2dcafb/util/0.log" Dec 05 14:05:08 crc kubenswrapper[4784]: I1205 14:05:08.663537 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104qz4c_06a04def-1da6-41fc-9aa1-9a6d5a2dcafb/util/0.log" Dec 05 14:05:08 crc kubenswrapper[4784]: I1205 14:05:08.676648 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104qz4c_06a04def-1da6-41fc-9aa1-9a6d5a2dcafb/pull/0.log" Dec 05 14:05:08 crc kubenswrapper[4784]: I1205 14:05:08.685676 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104qz4c_06a04def-1da6-41fc-9aa1-9a6d5a2dcafb/pull/0.log" Dec 05 14:05:08 crc kubenswrapper[4784]: I1205 14:05:08.834036 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104qz4c_06a04def-1da6-41fc-9aa1-9a6d5a2dcafb/util/0.log" Dec 05 14:05:08 crc kubenswrapper[4784]: I1205 14:05:08.839455 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104qz4c_06a04def-1da6-41fc-9aa1-9a6d5a2dcafb/pull/0.log" Dec 05 14:05:08 crc kubenswrapper[4784]: I1205 14:05:08.847609 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104qz4c_06a04def-1da6-41fc-9aa1-9a6d5a2dcafb/extract/0.log" Dec 05 14:05:09 crc kubenswrapper[4784]: I1205 14:05:09.011440 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83mz79j_a3027214-9fa3-4dd0-93f5-b3b316247c73/util/0.log" Dec 05 14:05:09 crc kubenswrapper[4784]: I1205 14:05:09.205224 4784 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83mz79j_a3027214-9fa3-4dd0-93f5-b3b316247c73/util/0.log" Dec 05 14:05:09 crc kubenswrapper[4784]: I1205 14:05:09.209431 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83mz79j_a3027214-9fa3-4dd0-93f5-b3b316247c73/pull/0.log" Dec 05 14:05:09 crc kubenswrapper[4784]: I1205 14:05:09.230226 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83mz79j_a3027214-9fa3-4dd0-93f5-b3b316247c73/pull/0.log" Dec 05 14:05:09 crc kubenswrapper[4784]: I1205 14:05:09.433951 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83mz79j_a3027214-9fa3-4dd0-93f5-b3b316247c73/util/0.log" Dec 05 14:05:09 crc kubenswrapper[4784]: I1205 14:05:09.439703 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83mz79j_a3027214-9fa3-4dd0-93f5-b3b316247c73/extract/0.log" Dec 05 14:05:09 crc kubenswrapper[4784]: I1205 14:05:09.489493 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83mz79j_a3027214-9fa3-4dd0-93f5-b3b316247c73/pull/0.log" Dec 05 14:05:09 crc kubenswrapper[4784]: I1205 14:05:09.617287 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pbmwm_7ab4604a-3a19-4d0b-b6a0-b8d7274df317/extract-utilities/0.log" Dec 05 14:05:09 crc kubenswrapper[4784]: I1205 14:05:09.758551 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pbmwm_7ab4604a-3a19-4d0b-b6a0-b8d7274df317/extract-content/0.log" Dec 05 14:05:09 crc kubenswrapper[4784]: I1205 14:05:09.778841 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pbmwm_7ab4604a-3a19-4d0b-b6a0-b8d7274df317/extract-content/0.log" Dec 05 14:05:09 crc kubenswrapper[4784]: I1205 14:05:09.810855 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pbmwm_7ab4604a-3a19-4d0b-b6a0-b8d7274df317/extract-utilities/0.log" Dec 05 14:05:10 crc kubenswrapper[4784]: I1205 14:05:10.000147 4784 scope.go:117] "RemoveContainer" containerID="142a049a6beda69ace9ae45109b3c32442d01e6b899428e3aae02b4842203f44" Dec 05 14:05:10 crc kubenswrapper[4784]: I1205 14:05:10.002973 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pbmwm_7ab4604a-3a19-4d0b-b6a0-b8d7274df317/extract-content/0.log" Dec 05 14:05:10 crc kubenswrapper[4784]: I1205 14:05:10.041634 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pbmwm_7ab4604a-3a19-4d0b-b6a0-b8d7274df317/extract-utilities/0.log" Dec 05 14:05:10 crc kubenswrapper[4784]: I1205 14:05:10.320527 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerStarted","Data":"839c4e0f7c807db9b0c24926cf01651e85fd1b9bd288c1d8fa7731c0ce087aa7"} Dec 05 14:05:10 crc kubenswrapper[4784]: I1205 14:05:10.443384 4784 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_community-operators-6trnd_55e564fa-612a-4e0b-bc29-09e5384fe16c/extract-utilities/0.log" Dec 05 14:05:10 crc kubenswrapper[4784]: I1205 14:05:10.745956 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6trnd_55e564fa-612a-4e0b-bc29-09e5384fe16c/extract-utilities/0.log" Dec 05 14:05:10 crc kubenswrapper[4784]: I1205 14:05:10.830717 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pbmwm_7ab4604a-3a19-4d0b-b6a0-b8d7274df317/registry-server/0.log" Dec 05 14:05:10 crc kubenswrapper[4784]: I1205 14:05:10.853023 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6trnd_55e564fa-612a-4e0b-bc29-09e5384fe16c/extract-content/0.log" Dec 05 14:05:10 crc kubenswrapper[4784]: I1205 14:05:10.853825 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6trnd_55e564fa-612a-4e0b-bc29-09e5384fe16c/extract-content/0.log" Dec 05 14:05:10 crc kubenswrapper[4784]: I1205 14:05:10.968560 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6trnd_55e564fa-612a-4e0b-bc29-09e5384fe16c/extract-utilities/0.log" Dec 05 14:05:11 crc kubenswrapper[4784]: I1205 14:05:11.077686 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6trnd_55e564fa-612a-4e0b-bc29-09e5384fe16c/extract-content/0.log" Dec 05 14:05:11 crc kubenswrapper[4784]: I1205 14:05:11.172575 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-kvfdl_4825dcfb-cf17-4a0d-b4f2-4f46c87beccb/marketplace-operator/2.log" Dec 05 14:05:11 crc kubenswrapper[4784]: I1205 14:05:11.343230 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-kvfdl_4825dcfb-cf17-4a0d-b4f2-4f46c87beccb/marketplace-operator/1.log" Dec 05 14:05:11 crc kubenswrapper[4784]: I1205 14:05:11.417939 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-hg45x_24a95ae6-0c84-4572-bfc4-5acb5295577c/extract-utilities/0.log" Dec 05 14:05:11 crc kubenswrapper[4784]: I1205 14:05:11.637513 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-hg45x_24a95ae6-0c84-4572-bfc4-5acb5295577c/extract-content/0.log" Dec 05 14:05:11 crc kubenswrapper[4784]: I1205 14:05:11.679552 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-hg45x_24a95ae6-0c84-4572-bfc4-5acb5295577c/extract-content/0.log" Dec 05 14:05:11 crc kubenswrapper[4784]: I1205 14:05:11.717430 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-hg45x_24a95ae6-0c84-4572-bfc4-5acb5295577c/extract-utilities/0.log" Dec 05 14:05:11 crc kubenswrapper[4784]: I1205 14:05:11.899767 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6trnd_55e564fa-612a-4e0b-bc29-09e5384fe16c/registry-server/0.log" Dec 05 14:05:11 crc kubenswrapper[4784]: I1205 14:05:11.938057 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-hg45x_24a95ae6-0c84-4572-bfc4-5acb5295577c/extract-utilities/0.log" Dec 05 14:05:11 crc kubenswrapper[4784]: I1205 14:05:11.957368 4784 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-hg45x_24a95ae6-0c84-4572-bfc4-5acb5295577c/extract-content/0.log" Dec 05 14:05:12 crc kubenswrapper[4784]: I1205 14:05:12.109390 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-xlw8f_564caa58-786b-44bd-96a5-963c2e8343f7/extract-utilities/0.log" Dec 05 14:05:12 crc kubenswrapper[4784]: I1205 14:05:12.171674 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-hg45x_24a95ae6-0c84-4572-bfc4-5acb5295577c/registry-server/0.log" Dec 05 14:05:12 crc kubenswrapper[4784]: I1205 14:05:12.275270 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-xlw8f_564caa58-786b-44bd-96a5-963c2e8343f7/extract-content/0.log" Dec 05 14:05:12 crc kubenswrapper[4784]: I1205 14:05:12.287439 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-xlw8f_564caa58-786b-44bd-96a5-963c2e8343f7/extract-content/0.log" Dec 05 14:05:12 crc kubenswrapper[4784]: I1205 14:05:12.306698 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-xlw8f_564caa58-786b-44bd-96a5-963c2e8343f7/extract-utilities/0.log" Dec 05 14:05:12 crc kubenswrapper[4784]: I1205 14:05:12.463223 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-xlw8f_564caa58-786b-44bd-96a5-963c2e8343f7/extract-utilities/0.log" Dec 05 14:05:12 crc kubenswrapper[4784]: I1205 14:05:12.463967 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-xlw8f_564caa58-786b-44bd-96a5-963c2e8343f7/extract-content/0.log" Dec 05 14:05:13 crc kubenswrapper[4784]: I1205 14:05:13.493783 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-xlw8f_564caa58-786b-44bd-96a5-963c2e8343f7/registry-server/0.log" Dec 05 14:05:24 crc kubenswrapper[4784]: I1205 14:05:24.860405 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-668cf9dfbb-q5lxp_954cb856-d909-4541-89c7-7c38bf8d8618/prometheus-operator/0.log" Dec 05 14:05:25 crc kubenswrapper[4784]: I1205 14:05:25.000592 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-848d96db67-6k7p8_573cb676-d704-4d0d-852c-582d38a64cdb/prometheus-operator-admission-webhook/0.log" Dec 05 14:05:25 crc kubenswrapper[4784]: I1205 14:05:25.034749 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-848d96db67-869qk_e6ba5df2-9910-453c-9993-fca6642b4e8e/prometheus-operator-admission-webhook/0.log" Dec 05 14:05:25 crc kubenswrapper[4784]: I1205 14:05:25.238323 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-d8bb48f5d-m29qp_bdf43736-9e51-4d7d-8290-075b7f058f62/operator/0.log" Dec 05 14:05:25 crc kubenswrapper[4784]: I1205 14:05:25.244925 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5446b9c989-6j8bm_a56fb787-b445-49b8-a50c-5cddf822fc68/perses-operator/0.log" Dec 05 14:07:27 crc kubenswrapper[4784]: I1205 14:07:27.807988 4784 generic.go:334] "Generic (PLEG): container finished" podID="7ea9afcc-76ac-4636-96ef-8b754926648c" containerID="d38c702afee5dad8af69932091d95d65cd4e65d292f5c89c40313da76cf96be1" 
exitCode=0 Dec 05 14:07:27 crc kubenswrapper[4784]: I1205 14:07:27.808095 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xqjjn/must-gather-gsxrd" event={"ID":"7ea9afcc-76ac-4636-96ef-8b754926648c","Type":"ContainerDied","Data":"d38c702afee5dad8af69932091d95d65cd4e65d292f5c89c40313da76cf96be1"} Dec 05 14:07:27 crc kubenswrapper[4784]: I1205 14:07:27.809464 4784 scope.go:117] "RemoveContainer" containerID="d38c702afee5dad8af69932091d95d65cd4e65d292f5c89c40313da76cf96be1" Dec 05 14:07:28 crc kubenswrapper[4784]: I1205 14:07:28.378992 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-xqjjn_must-gather-gsxrd_7ea9afcc-76ac-4636-96ef-8b754926648c/gather/0.log" Dec 05 14:07:29 crc kubenswrapper[4784]: I1205 14:07:29.572526 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 14:07:29 crc kubenswrapper[4784]: I1205 14:07:29.572864 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 14:07:36 crc kubenswrapper[4784]: I1205 14:07:36.926659 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-xqjjn/must-gather-gsxrd"] Dec 05 14:07:36 crc kubenswrapper[4784]: I1205 14:07:36.927390 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-xqjjn/must-gather-gsxrd" podUID="7ea9afcc-76ac-4636-96ef-8b754926648c" containerName="copy" containerID="cri-o://be9821a3e48abfd40e5cce7b5cdd86807c307245913e9ac14c4686e741c7e985" gracePeriod=2 Dec 05 14:07:36 crc kubenswrapper[4784]: I1205 14:07:36.936629 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-xqjjn/must-gather-gsxrd"] Dec 05 14:07:37 crc kubenswrapper[4784]: I1205 14:07:37.436732 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-xqjjn_must-gather-gsxrd_7ea9afcc-76ac-4636-96ef-8b754926648c/copy/0.log" Dec 05 14:07:37 crc kubenswrapper[4784]: I1205 14:07:37.437533 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xqjjn/must-gather-gsxrd"
Dec 05 14:07:37 crc kubenswrapper[4784]: I1205 14:07:37.602624 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/7ea9afcc-76ac-4636-96ef-8b754926648c-must-gather-output\") pod \"7ea9afcc-76ac-4636-96ef-8b754926648c\" (UID: \"7ea9afcc-76ac-4636-96ef-8b754926648c\") "
Dec 05 14:07:37 crc kubenswrapper[4784]: I1205 14:07:37.603069 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fgp4k\" (UniqueName: \"kubernetes.io/projected/7ea9afcc-76ac-4636-96ef-8b754926648c-kube-api-access-fgp4k\") pod \"7ea9afcc-76ac-4636-96ef-8b754926648c\" (UID: \"7ea9afcc-76ac-4636-96ef-8b754926648c\") "
Dec 05 14:07:37 crc kubenswrapper[4784]: I1205 14:07:37.608660 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ea9afcc-76ac-4636-96ef-8b754926648c-kube-api-access-fgp4k" (OuterVolumeSpecName: "kube-api-access-fgp4k") pod "7ea9afcc-76ac-4636-96ef-8b754926648c" (UID: "7ea9afcc-76ac-4636-96ef-8b754926648c"). InnerVolumeSpecName "kube-api-access-fgp4k". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 14:07:37 crc kubenswrapper[4784]: I1205 14:07:37.706389 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fgp4k\" (UniqueName: \"kubernetes.io/projected/7ea9afcc-76ac-4636-96ef-8b754926648c-kube-api-access-fgp4k\") on node \"crc\" DevicePath \"\""
Dec 05 14:07:37 crc kubenswrapper[4784]: I1205 14:07:37.819657 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7ea9afcc-76ac-4636-96ef-8b754926648c-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "7ea9afcc-76ac-4636-96ef-8b754926648c" (UID: "7ea9afcc-76ac-4636-96ef-8b754926648c"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 14:07:37 crc kubenswrapper[4784]: I1205 14:07:37.912262 4784 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/7ea9afcc-76ac-4636-96ef-8b754926648c-must-gather-output\") on node \"crc\" DevicePath \"\""
Dec 05 14:07:37 crc kubenswrapper[4784]: I1205 14:07:37.921086 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-xqjjn_must-gather-gsxrd_7ea9afcc-76ac-4636-96ef-8b754926648c/copy/0.log"
Dec 05 14:07:37 crc kubenswrapper[4784]: I1205 14:07:37.921635 4784 generic.go:334] "Generic (PLEG): container finished" podID="7ea9afcc-76ac-4636-96ef-8b754926648c" containerID="be9821a3e48abfd40e5cce7b5cdd86807c307245913e9ac14c4686e741c7e985" exitCode=143
Dec 05 14:07:37 crc kubenswrapper[4784]: I1205 14:07:37.921697 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xqjjn/must-gather-gsxrd"
Dec 05 14:07:37 crc kubenswrapper[4784]: I1205 14:07:37.921731 4784 scope.go:117] "RemoveContainer" containerID="be9821a3e48abfd40e5cce7b5cdd86807c307245913e9ac14c4686e741c7e985"
Dec 05 14:07:37 crc kubenswrapper[4784]: I1205 14:07:37.943946 4784 scope.go:117] "RemoveContainer" containerID="d38c702afee5dad8af69932091d95d65cd4e65d292f5c89c40313da76cf96be1"
Dec 05 14:07:38 crc kubenswrapper[4784]: I1205 14:07:38.019608 4784 scope.go:117] "RemoveContainer" containerID="be9821a3e48abfd40e5cce7b5cdd86807c307245913e9ac14c4686e741c7e985"
Dec 05 14:07:38 crc kubenswrapper[4784]: E1205 14:07:38.020158 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"be9821a3e48abfd40e5cce7b5cdd86807c307245913e9ac14c4686e741c7e985\": container with ID starting with be9821a3e48abfd40e5cce7b5cdd86807c307245913e9ac14c4686e741c7e985 not found: ID does not exist" containerID="be9821a3e48abfd40e5cce7b5cdd86807c307245913e9ac14c4686e741c7e985"
Dec 05 14:07:38 crc kubenswrapper[4784]: I1205 14:07:38.020223 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be9821a3e48abfd40e5cce7b5cdd86807c307245913e9ac14c4686e741c7e985"} err="failed to get container status \"be9821a3e48abfd40e5cce7b5cdd86807c307245913e9ac14c4686e741c7e985\": rpc error: code = NotFound desc = could not find container \"be9821a3e48abfd40e5cce7b5cdd86807c307245913e9ac14c4686e741c7e985\": container with ID starting with be9821a3e48abfd40e5cce7b5cdd86807c307245913e9ac14c4686e741c7e985 not found: ID does not exist"
Dec 05 14:07:38 crc kubenswrapper[4784]: I1205 14:07:38.020254 4784 scope.go:117] "RemoveContainer" containerID="d38c702afee5dad8af69932091d95d65cd4e65d292f5c89c40313da76cf96be1"
Dec 05 14:07:38 crc kubenswrapper[4784]: E1205 14:07:38.020588 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d38c702afee5dad8af69932091d95d65cd4e65d292f5c89c40313da76cf96be1\": container with ID starting with d38c702afee5dad8af69932091d95d65cd4e65d292f5c89c40313da76cf96be1 not found: ID does not exist" containerID="d38c702afee5dad8af69932091d95d65cd4e65d292f5c89c40313da76cf96be1"
Dec 05 14:07:38 crc kubenswrapper[4784]: I1205 14:07:38.020615 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d38c702afee5dad8af69932091d95d65cd4e65d292f5c89c40313da76cf96be1"} err="failed to get container status \"d38c702afee5dad8af69932091d95d65cd4e65d292f5c89c40313da76cf96be1\": rpc error: code = NotFound desc = could not find container \"d38c702afee5dad8af69932091d95d65cd4e65d292f5c89c40313da76cf96be1\": container with ID starting with d38c702afee5dad8af69932091d95d65cd4e65d292f5c89c40313da76cf96be1 not found: ID does not exist"
Dec 05 14:07:39 crc kubenswrapper[4784]: I1205 14:07:39.020919 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ea9afcc-76ac-4636-96ef-8b754926648c" path="/var/lib/kubelet/pods/7ea9afcc-76ac-4636-96ef-8b754926648c/volumes"
Dec 05 14:07:40 crc kubenswrapper[4784]: I1205 14:07:40.996294 4784 scope.go:117] "RemoveContainer" containerID="32b30fbc20d149ed33b1a825784ec1804fdbac85f0989fa3df44a4e58025a57c"
Dec 05 14:07:59 crc kubenswrapper[4784]: I1205 14:07:59.573067 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 14:07:59 crc kubenswrapper[4784]: I1205 14:07:59.573640 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 14:08:29 crc kubenswrapper[4784]: I1205 14:08:29.572076 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 14:08:29 crc kubenswrapper[4784]: I1205 14:08:29.572616 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 14:08:29 crc kubenswrapper[4784]: I1205 14:08:29.572654 4784 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm"
Dec 05 14:08:29 crc kubenswrapper[4784]: I1205 14:08:29.573489 4784 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"839c4e0f7c807db9b0c24926cf01651e85fd1b9bd288c1d8fa7731c0ce087aa7"} pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 14:08:29 crc kubenswrapper[4784]: I1205 14:08:29.573541 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" containerID="cri-o://839c4e0f7c807db9b0c24926cf01651e85fd1b9bd288c1d8fa7731c0ce087aa7" gracePeriod=600
Dec 05 14:08:30 crc kubenswrapper[4784]: I1205 14:08:30.483228 4784 generic.go:334] "Generic (PLEG): container finished" podID="be412f31-7a36-4811-8914-be8cdc987d08" containerID="839c4e0f7c807db9b0c24926cf01651e85fd1b9bd288c1d8fa7731c0ce087aa7" exitCode=0
Dec 05 14:08:30 crc kubenswrapper[4784]: I1205 14:08:30.483270 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerDied","Data":"839c4e0f7c807db9b0c24926cf01651e85fd1b9bd288c1d8fa7731c0ce087aa7"}
Dec 05 14:08:30 crc kubenswrapper[4784]: I1205 14:08:30.484147 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerStarted","Data":"30bb470db62f03f9ad108e27b3986b159c5260ee961be043b95823d3a75eb7f2"}
Dec 05 14:08:30 crc kubenswrapper[4784]: I1205 14:08:30.484231 4784 scope.go:117] "RemoveContainer" containerID="142a049a6beda69ace9ae45109b3c32442d01e6b899428e3aae02b4842203f44"
Dec 05 14:08:32 crc kubenswrapper[4784]: I1205 14:08:32.269341 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-p9rpb"]
Dec 05 14:08:32 crc kubenswrapper[4784]: E1205 14:08:32.270130 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cae07b75-3cce-4e2a-8632-c4af29d56bab" containerName="registry-server"
Dec 05 14:08:32 crc kubenswrapper[4784]: I1205 14:08:32.270152 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="cae07b75-3cce-4e2a-8632-c4af29d56bab" containerName="registry-server"
Dec 05 14:08:32 crc kubenswrapper[4784]: E1205 14:08:32.270173 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cae07b75-3cce-4e2a-8632-c4af29d56bab" containerName="extract-utilities"
Dec 05 14:08:32 crc kubenswrapper[4784]: I1205 14:08:32.270181 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="cae07b75-3cce-4e2a-8632-c4af29d56bab" containerName="extract-utilities"
Dec 05 14:08:32 crc kubenswrapper[4784]: E1205 14:08:32.270238 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ea9afcc-76ac-4636-96ef-8b754926648c" containerName="copy"
Dec 05 14:08:32 crc kubenswrapper[4784]: I1205 14:08:32.270247 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ea9afcc-76ac-4636-96ef-8b754926648c" containerName="copy"
Dec 05 14:08:32 crc kubenswrapper[4784]: E1205 14:08:32.270266 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ea9afcc-76ac-4636-96ef-8b754926648c" containerName="gather"
Dec 05 14:08:32 crc kubenswrapper[4784]: I1205 14:08:32.270274 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ea9afcc-76ac-4636-96ef-8b754926648c" containerName="gather"
Dec 05 14:08:32 crc kubenswrapper[4784]: E1205 14:08:32.270288 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75ca708c-2cce-4301-9bfc-5c869256aed5" containerName="container-00"
Dec 05 14:08:32 crc kubenswrapper[4784]: I1205 14:08:32.270296 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="75ca708c-2cce-4301-9bfc-5c869256aed5" containerName="container-00"
Dec 05 14:08:32 crc kubenswrapper[4784]: E1205 14:08:32.270309 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cae07b75-3cce-4e2a-8632-c4af29d56bab" containerName="extract-content"
Dec 05 14:08:32 crc kubenswrapper[4784]: I1205 14:08:32.270317 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="cae07b75-3cce-4e2a-8632-c4af29d56bab" containerName="extract-content"
Dec 05 14:08:32 crc kubenswrapper[4784]: I1205 14:08:32.270655 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ea9afcc-76ac-4636-96ef-8b754926648c" containerName="copy"
Dec 05 14:08:32 crc kubenswrapper[4784]: I1205 14:08:32.270675 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ea9afcc-76ac-4636-96ef-8b754926648c" containerName="gather"
Dec 05 14:08:32 crc kubenswrapper[4784]: I1205 14:08:32.270712 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="75ca708c-2cce-4301-9bfc-5c869256aed5" containerName="container-00"
Dec 05 14:08:32 crc kubenswrapper[4784]: I1205 14:08:32.270739 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="cae07b75-3cce-4e2a-8632-c4af29d56bab" containerName="registry-server"
Dec 05 14:08:32 crc kubenswrapper[4784]: I1205 14:08:32.272611 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-p9rpb"
Dec 05 14:08:32 crc kubenswrapper[4784]: I1205 14:08:32.291577 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-p9rpb"]
Dec 05 14:08:32 crc kubenswrapper[4784]: I1205 14:08:32.352602 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793-catalog-content\") pod \"redhat-operators-p9rpb\" (UID: \"2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793\") " pod="openshift-marketplace/redhat-operators-p9rpb"
Dec 05 14:08:32 crc kubenswrapper[4784]: I1205 14:08:32.352695 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7xqcc\" (UniqueName: \"kubernetes.io/projected/2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793-kube-api-access-7xqcc\") pod \"redhat-operators-p9rpb\" (UID: \"2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793\") " pod="openshift-marketplace/redhat-operators-p9rpb"
Dec 05 14:08:32 crc kubenswrapper[4784]: I1205 14:08:32.353091 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793-utilities\") pod \"redhat-operators-p9rpb\" (UID: \"2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793\") " pod="openshift-marketplace/redhat-operators-p9rpb"
Dec 05 14:08:32 crc kubenswrapper[4784]: I1205 14:08:32.455279 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793-catalog-content\") pod \"redhat-operators-p9rpb\" (UID: \"2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793\") " pod="openshift-marketplace/redhat-operators-p9rpb"
Dec 05 14:08:32 crc kubenswrapper[4784]: I1205 14:08:32.455363 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7xqcc\" (UniqueName: \"kubernetes.io/projected/2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793-kube-api-access-7xqcc\") pod \"redhat-operators-p9rpb\" (UID: \"2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793\") " pod="openshift-marketplace/redhat-operators-p9rpb"
Dec 05 14:08:32 crc kubenswrapper[4784]: I1205 14:08:32.455446 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793-utilities\") pod \"redhat-operators-p9rpb\" (UID: \"2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793\") " pod="openshift-marketplace/redhat-operators-p9rpb"
Dec 05 14:08:32 crc kubenswrapper[4784]: I1205 14:08:32.455815 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793-catalog-content\") pod \"redhat-operators-p9rpb\" (UID: \"2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793\") " pod="openshift-marketplace/redhat-operators-p9rpb"
Dec 05 14:08:32 crc kubenswrapper[4784]: I1205 14:08:32.455837 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793-utilities\") pod \"redhat-operators-p9rpb\" (UID: \"2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793\") " pod="openshift-marketplace/redhat-operators-p9rpb"
Dec 05 14:08:32 crc kubenswrapper[4784]: I1205 14:08:32.479790 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7xqcc\" (UniqueName: \"kubernetes.io/projected/2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793-kube-api-access-7xqcc\") pod \"redhat-operators-p9rpb\" (UID: \"2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793\") " pod="openshift-marketplace/redhat-operators-p9rpb"
Dec 05 14:08:32 crc kubenswrapper[4784]: I1205 14:08:32.604173 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-p9rpb"
Dec 05 14:08:33 crc kubenswrapper[4784]: I1205 14:08:33.056124 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-p9rpb"]
Dec 05 14:08:33 crc kubenswrapper[4784]: I1205 14:08:33.517213 4784 generic.go:334] "Generic (PLEG): container finished" podID="2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793" containerID="627434df045658919a964520ffbeecb5e21471f8b80c9279de2eca436d9bf0f1" exitCode=0
Dec 05 14:08:33 crc kubenswrapper[4784]: I1205 14:08:33.517277 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p9rpb" event={"ID":"2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793","Type":"ContainerDied","Data":"627434df045658919a964520ffbeecb5e21471f8b80c9279de2eca436d9bf0f1"}
Dec 05 14:08:33 crc kubenswrapper[4784]: I1205 14:08:33.517302 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p9rpb" event={"ID":"2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793","Type":"ContainerStarted","Data":"68cb0547a13d0739fb176af81fad50a43edc7cd220c9656012746d802965552c"}
Dec 05 14:08:33 crc kubenswrapper[4784]: I1205 14:08:33.521622 4784 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 05 14:08:34 crc kubenswrapper[4784]: I1205 14:08:34.531476 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p9rpb" event={"ID":"2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793","Type":"ContainerStarted","Data":"e051f2fd7c4342a664bb440a5086721c4479b62c2eccfa4faa42b1916e5b5d14"}
Dec 05 14:08:36 crc kubenswrapper[4784]: I1205 14:08:36.553869 4784 generic.go:334] "Generic (PLEG): container finished" podID="2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793" containerID="e051f2fd7c4342a664bb440a5086721c4479b62c2eccfa4faa42b1916e5b5d14" exitCode=0
Dec 05 14:08:36 crc kubenswrapper[4784]: I1205 14:08:36.553993 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p9rpb" event={"ID":"2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793","Type":"ContainerDied","Data":"e051f2fd7c4342a664bb440a5086721c4479b62c2eccfa4faa42b1916e5b5d14"}
Dec 05 14:08:37 crc kubenswrapper[4784]: I1205 14:08:37.567627 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p9rpb" event={"ID":"2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793","Type":"ContainerStarted","Data":"8a1ed1ad52e3894f80d245dffa69366e18471d2396d1fb6e6f887dcd2a7a9088"}
Dec 05 14:08:37 crc kubenswrapper[4784]: I1205 14:08:37.592859 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-p9rpb" podStartSLOduration=2.034897475 podStartE2EDuration="5.592837612s" podCreationTimestamp="2025-12-05 14:08:32 +0000 UTC" firstStartedPulling="2025-12-05 14:08:33.521340494 +0000 UTC m=+6192.941407309" lastFinishedPulling="2025-12-05 14:08:37.079280631 +0000 UTC m=+6196.499347446" observedRunningTime="2025-12-05 14:08:37.583858373 +0000 UTC m=+6197.003925218" watchObservedRunningTime="2025-12-05 14:08:37.592837612 +0000 UTC m=+6197.012904447"
Dec 05 14:08:41 crc kubenswrapper[4784]: I1205 14:08:41.086372 4784 scope.go:117] "RemoveContainer" containerID="ee2cfa62d4ebe9c98a876dbce7573fa68e8fc49c6c65400b5fc7a9c86f353c20"
Dec 05 14:08:42 crc kubenswrapper[4784]: I1205 14:08:42.604601 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-p9rpb"
Dec 05 14:08:42 crc kubenswrapper[4784]: I1205 14:08:42.605006 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-p9rpb"
Dec 05 14:08:42 crc kubenswrapper[4784]: I1205 14:08:42.653036 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-p9rpb"
Dec 05 14:08:42 crc kubenswrapper[4784]: I1205 14:08:42.704466 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-p9rpb"
Dec 05 14:08:42 crc kubenswrapper[4784]: I1205 14:08:42.889912 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-p9rpb"]
Dec 05 14:08:44 crc kubenswrapper[4784]: I1205 14:08:44.657837 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-p9rpb" podUID="2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793" containerName="registry-server" containerID="cri-o://8a1ed1ad52e3894f80d245dffa69366e18471d2396d1fb6e6f887dcd2a7a9088" gracePeriod=2
Dec 05 14:08:46 crc kubenswrapper[4784]: I1205 14:08:46.679561 4784 generic.go:334] "Generic (PLEG): container finished" podID="2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793" containerID="8a1ed1ad52e3894f80d245dffa69366e18471d2396d1fb6e6f887dcd2a7a9088" exitCode=0
Dec 05 14:08:46 crc kubenswrapper[4784]: I1205 14:08:46.679646 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p9rpb" event={"ID":"2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793","Type":"ContainerDied","Data":"8a1ed1ad52e3894f80d245dffa69366e18471d2396d1fb6e6f887dcd2a7a9088"}
Dec 05 14:08:46 crc kubenswrapper[4784]: I1205 14:08:46.969626 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-p9rpb"
Dec 05 14:08:47 crc kubenswrapper[4784]: I1205 14:08:47.114577 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793-utilities\") pod \"2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793\" (UID: \"2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793\") "
Dec 05 14:08:47 crc kubenswrapper[4784]: I1205 14:08:47.114997 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793-catalog-content\") pod \"2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793\" (UID: \"2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793\") "
Dec 05 14:08:47 crc kubenswrapper[4784]: I1205 14:08:47.115227 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7xqcc\" (UniqueName: \"kubernetes.io/projected/2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793-kube-api-access-7xqcc\") pod \"2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793\" (UID: \"2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793\") "
Dec 05 14:08:47 crc kubenswrapper[4784]: I1205 14:08:47.117954 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793-utilities" (OuterVolumeSpecName: "utilities") pod "2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793" (UID: "2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 14:08:47 crc kubenswrapper[4784]: I1205 14:08:47.125142 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793-kube-api-access-7xqcc" (OuterVolumeSpecName: "kube-api-access-7xqcc") pod "2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793" (UID: "2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793"). InnerVolumeSpecName "kube-api-access-7xqcc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 14:08:47 crc kubenswrapper[4784]: I1205 14:08:47.217324 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7xqcc\" (UniqueName: \"kubernetes.io/projected/2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793-kube-api-access-7xqcc\") on node \"crc\" DevicePath \"\""
Dec 05 14:08:47 crc kubenswrapper[4784]: I1205 14:08:47.217365 4784 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 14:08:47 crc kubenswrapper[4784]: I1205 14:08:47.232808 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793" (UID: "2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 14:08:47 crc kubenswrapper[4784]: I1205 14:08:47.321460 4784 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 14:08:47 crc kubenswrapper[4784]: I1205 14:08:47.698044 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p9rpb" event={"ID":"2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793","Type":"ContainerDied","Data":"68cb0547a13d0739fb176af81fad50a43edc7cd220c9656012746d802965552c"}
Dec 05 14:08:47 crc kubenswrapper[4784]: I1205 14:08:47.698109 4784 scope.go:117] "RemoveContainer" containerID="8a1ed1ad52e3894f80d245dffa69366e18471d2396d1fb6e6f887dcd2a7a9088"
Dec 05 14:08:47 crc kubenswrapper[4784]: I1205 14:08:47.698336 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-p9rpb"
Dec 05 14:08:47 crc kubenswrapper[4784]: I1205 14:08:47.723799 4784 scope.go:117] "RemoveContainer" containerID="e051f2fd7c4342a664bb440a5086721c4479b62c2eccfa4faa42b1916e5b5d14"
Dec 05 14:08:47 crc kubenswrapper[4784]: I1205 14:08:47.746761 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-p9rpb"]
Dec 05 14:08:47 crc kubenswrapper[4784]: I1205 14:08:47.757697 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-p9rpb"]
Dec 05 14:08:47 crc kubenswrapper[4784]: I1205 14:08:47.770144 4784 scope.go:117] "RemoveContainer" containerID="627434df045658919a964520ffbeecb5e21471f8b80c9279de2eca436d9bf0f1"
Dec 05 14:08:49 crc kubenswrapper[4784]: I1205 14:08:49.011325 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793" path="/var/lib/kubelet/pods/2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793/volumes"
Dec 05 14:10:29 crc kubenswrapper[4784]: I1205 14:10:29.573025 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 14:10:29 crc kubenswrapper[4784]: I1205 14:10:29.573577 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 14:10:59 crc kubenswrapper[4784]: I1205 14:10:59.572302 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 14:10:59 crc kubenswrapper[4784]: I1205 14:10:59.572849 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 14:11:00 crc kubenswrapper[4784]: I1205 14:11:00.964157 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-5w2dk/must-gather-k4sxt"]
"SyncLoop ADD" source="api" pods=["openshift-must-gather-5w2dk/must-gather-k4sxt"] Dec 05 14:11:00 crc kubenswrapper[4784]: E1205 14:11:00.965388 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793" containerName="extract-content" Dec 05 14:11:00 crc kubenswrapper[4784]: I1205 14:11:00.965469 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793" containerName="extract-content" Dec 05 14:11:00 crc kubenswrapper[4784]: E1205 14:11:00.965537 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793" containerName="registry-server" Dec 05 14:11:00 crc kubenswrapper[4784]: I1205 14:11:00.965591 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793" containerName="registry-server" Dec 05 14:11:00 crc kubenswrapper[4784]: E1205 14:11:00.965663 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793" containerName="extract-utilities" Dec 05 14:11:00 crc kubenswrapper[4784]: I1205 14:11:00.965720 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793" containerName="extract-utilities" Dec 05 14:11:00 crc kubenswrapper[4784]: I1205 14:11:00.965955 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f6ac88b-cf1c-4f7e-8f21-74eafcdb2793" containerName="registry-server" Dec 05 14:11:00 crc kubenswrapper[4784]: I1205 14:11:00.967086 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-5w2dk/must-gather-k4sxt" Dec 05 14:11:00 crc kubenswrapper[4784]: I1205 14:11:00.969833 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-5w2dk"/"kube-root-ca.crt" Dec 05 14:11:00 crc kubenswrapper[4784]: I1205 14:11:00.970247 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-5w2dk"/"openshift-service-ca.crt" Dec 05 14:11:00 crc kubenswrapper[4784]: I1205 14:11:00.992703 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-5w2dk/must-gather-k4sxt"] Dec 05 14:11:01 crc kubenswrapper[4784]: I1205 14:11:01.101392 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a7110865-6dae-444b-8077-bb2d8cb2fc60-must-gather-output\") pod \"must-gather-k4sxt\" (UID: \"a7110865-6dae-444b-8077-bb2d8cb2fc60\") " pod="openshift-must-gather-5w2dk/must-gather-k4sxt" Dec 05 14:11:01 crc kubenswrapper[4784]: I1205 14:11:01.101686 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ltx7c\" (UniqueName: \"kubernetes.io/projected/a7110865-6dae-444b-8077-bb2d8cb2fc60-kube-api-access-ltx7c\") pod \"must-gather-k4sxt\" (UID: \"a7110865-6dae-444b-8077-bb2d8cb2fc60\") " pod="openshift-must-gather-5w2dk/must-gather-k4sxt" Dec 05 14:11:01 crc kubenswrapper[4784]: I1205 14:11:01.204161 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a7110865-6dae-444b-8077-bb2d8cb2fc60-must-gather-output\") pod \"must-gather-k4sxt\" (UID: \"a7110865-6dae-444b-8077-bb2d8cb2fc60\") " pod="openshift-must-gather-5w2dk/must-gather-k4sxt" Dec 05 14:11:01 crc kubenswrapper[4784]: I1205 14:11:01.204228 4784 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-ltx7c\" (UniqueName: \"kubernetes.io/projected/a7110865-6dae-444b-8077-bb2d8cb2fc60-kube-api-access-ltx7c\") pod \"must-gather-k4sxt\" (UID: \"a7110865-6dae-444b-8077-bb2d8cb2fc60\") " pod="openshift-must-gather-5w2dk/must-gather-k4sxt" Dec 05 14:11:01 crc kubenswrapper[4784]: I1205 14:11:01.204816 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a7110865-6dae-444b-8077-bb2d8cb2fc60-must-gather-output\") pod \"must-gather-k4sxt\" (UID: \"a7110865-6dae-444b-8077-bb2d8cb2fc60\") " pod="openshift-must-gather-5w2dk/must-gather-k4sxt" Dec 05 14:11:01 crc kubenswrapper[4784]: I1205 14:11:01.228127 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ltx7c\" (UniqueName: \"kubernetes.io/projected/a7110865-6dae-444b-8077-bb2d8cb2fc60-kube-api-access-ltx7c\") pod \"must-gather-k4sxt\" (UID: \"a7110865-6dae-444b-8077-bb2d8cb2fc60\") " pod="openshift-must-gather-5w2dk/must-gather-k4sxt" Dec 05 14:11:01 crc kubenswrapper[4784]: I1205 14:11:01.287770 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-5w2dk/must-gather-k4sxt" Dec 05 14:11:01 crc kubenswrapper[4784]: I1205 14:11:01.806959 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-5w2dk/must-gather-k4sxt"] Dec 05 14:11:02 crc kubenswrapper[4784]: I1205 14:11:02.035984 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-5w2dk/must-gather-k4sxt" event={"ID":"a7110865-6dae-444b-8077-bb2d8cb2fc60","Type":"ContainerStarted","Data":"0fb107f70b022ca2473d01bac5e279e7646adf770ba8f7cf3c4411ce55508851"} Dec 05 14:11:03 crc kubenswrapper[4784]: I1205 14:11:03.052686 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-5w2dk/must-gather-k4sxt" event={"ID":"a7110865-6dae-444b-8077-bb2d8cb2fc60","Type":"ContainerStarted","Data":"75128942c38cb68b3182fd616e686f3401d36403685b30dca9c34ba8537b0e7f"} Dec 05 14:11:03 crc kubenswrapper[4784]: I1205 14:11:03.053008 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-5w2dk/must-gather-k4sxt" event={"ID":"a7110865-6dae-444b-8077-bb2d8cb2fc60","Type":"ContainerStarted","Data":"15f88dc54641fabccbda6a07ba0e6ccf47e50a97fc550e5b1a431b3b95dacd63"} Dec 05 14:11:05 crc kubenswrapper[4784]: I1205 14:11:05.914489 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-5w2dk/must-gather-k4sxt" podStartSLOduration=5.914465431 podStartE2EDuration="5.914465431s" podCreationTimestamp="2025-12-05 14:11:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 14:11:03.074384704 +0000 UTC m=+6342.494451589" watchObservedRunningTime="2025-12-05 14:11:05.914465431 +0000 UTC m=+6345.334532256" Dec 05 14:11:05 crc kubenswrapper[4784]: I1205 14:11:05.925098 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-5w2dk/crc-debug-2pnxt"] Dec 05 14:11:05 crc kubenswrapper[4784]: I1205 14:11:05.926687 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-5w2dk/crc-debug-2pnxt" Dec 05 14:11:05 crc kubenswrapper[4784]: I1205 14:11:05.929729 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-5w2dk"/"default-dockercfg-nhxxm" Dec 05 14:11:06 crc kubenswrapper[4784]: I1205 14:11:06.044438 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dg2jf\" (UniqueName: \"kubernetes.io/projected/f44fc51c-8407-4f44-9d3f-7b35fb235884-kube-api-access-dg2jf\") pod \"crc-debug-2pnxt\" (UID: \"f44fc51c-8407-4f44-9d3f-7b35fb235884\") " pod="openshift-must-gather-5w2dk/crc-debug-2pnxt" Dec 05 14:11:06 crc kubenswrapper[4784]: I1205 14:11:06.044852 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f44fc51c-8407-4f44-9d3f-7b35fb235884-host\") pod \"crc-debug-2pnxt\" (UID: \"f44fc51c-8407-4f44-9d3f-7b35fb235884\") " pod="openshift-must-gather-5w2dk/crc-debug-2pnxt" Dec 05 14:11:06 crc kubenswrapper[4784]: I1205 14:11:06.148214 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f44fc51c-8407-4f44-9d3f-7b35fb235884-host\") pod \"crc-debug-2pnxt\" (UID: \"f44fc51c-8407-4f44-9d3f-7b35fb235884\") " pod="openshift-must-gather-5w2dk/crc-debug-2pnxt" Dec 05 14:11:06 crc kubenswrapper[4784]: I1205 14:11:06.148380 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f44fc51c-8407-4f44-9d3f-7b35fb235884-host\") pod \"crc-debug-2pnxt\" (UID: \"f44fc51c-8407-4f44-9d3f-7b35fb235884\") " pod="openshift-must-gather-5w2dk/crc-debug-2pnxt" Dec 05 14:11:06 crc kubenswrapper[4784]: I1205 14:11:06.148604 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dg2jf\" (UniqueName: \"kubernetes.io/projected/f44fc51c-8407-4f44-9d3f-7b35fb235884-kube-api-access-dg2jf\") pod \"crc-debug-2pnxt\" (UID: \"f44fc51c-8407-4f44-9d3f-7b35fb235884\") " pod="openshift-must-gather-5w2dk/crc-debug-2pnxt" Dec 05 14:11:06 crc kubenswrapper[4784]: I1205 14:11:06.317250 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dg2jf\" (UniqueName: \"kubernetes.io/projected/f44fc51c-8407-4f44-9d3f-7b35fb235884-kube-api-access-dg2jf\") pod \"crc-debug-2pnxt\" (UID: \"f44fc51c-8407-4f44-9d3f-7b35fb235884\") " pod="openshift-must-gather-5w2dk/crc-debug-2pnxt" Dec 05 14:11:06 crc kubenswrapper[4784]: I1205 14:11:06.550215 4784 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-5w2dk/crc-debug-2pnxt" Dec 05 14:11:06 crc kubenswrapper[4784]: W1205 14:11:06.598832 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf44fc51c_8407_4f44_9d3f_7b35fb235884.slice/crio-21401f4dd6992b5d76cd00e72999adef14f8ac247c309842c0ca57c12c42abb3 WatchSource:0}: Error finding container 21401f4dd6992b5d76cd00e72999adef14f8ac247c309842c0ca57c12c42abb3: Status 404 returned error can't find the container with id 21401f4dd6992b5d76cd00e72999adef14f8ac247c309842c0ca57c12c42abb3 Dec 05 14:11:07 crc kubenswrapper[4784]: I1205 14:11:07.092039 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-5w2dk/crc-debug-2pnxt" event={"ID":"f44fc51c-8407-4f44-9d3f-7b35fb235884","Type":"ContainerStarted","Data":"6d02705b191727c274813069c85dd7ffffb292180d2aed78f87932b9648ab2fe"} Dec 05 14:11:07 crc kubenswrapper[4784]: I1205 14:11:07.092531 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-5w2dk/crc-debug-2pnxt" event={"ID":"f44fc51c-8407-4f44-9d3f-7b35fb235884","Type":"ContainerStarted","Data":"21401f4dd6992b5d76cd00e72999adef14f8ac247c309842c0ca57c12c42abb3"} Dec 05 14:11:07 crc kubenswrapper[4784]: I1205 14:11:07.114618 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-5w2dk/crc-debug-2pnxt" podStartSLOduration=2.114602249 podStartE2EDuration="2.114602249s" podCreationTimestamp="2025-12-05 14:11:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 14:11:07.107433655 +0000 UTC m=+6346.527500470" watchObservedRunningTime="2025-12-05 14:11:07.114602249 +0000 UTC m=+6346.534669064" Dec 05 14:11:29 crc kubenswrapper[4784]: I1205 14:11:29.572277 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 14:11:29 crc kubenswrapper[4784]: I1205 14:11:29.572956 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 14:11:29 crc kubenswrapper[4784]: I1205 14:11:29.573010 4784 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" Dec 05 14:11:29 crc kubenswrapper[4784]: I1205 14:11:29.573910 4784 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"30bb470db62f03f9ad108e27b3986b159c5260ee961be043b95823d3a75eb7f2"} pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 14:11:29 crc kubenswrapper[4784]: I1205 14:11:29.573977 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" 
containerID="cri-o://30bb470db62f03f9ad108e27b3986b159c5260ee961be043b95823d3a75eb7f2" gracePeriod=600 Dec 05 14:11:29 crc kubenswrapper[4784]: E1205 14:11:29.707985 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 14:11:30 crc kubenswrapper[4784]: I1205 14:11:30.315442 4784 generic.go:334] "Generic (PLEG): container finished" podID="be412f31-7a36-4811-8914-be8cdc987d08" containerID="30bb470db62f03f9ad108e27b3986b159c5260ee961be043b95823d3a75eb7f2" exitCode=0 Dec 05 14:11:30 crc kubenswrapper[4784]: I1205 14:11:30.315509 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerDied","Data":"30bb470db62f03f9ad108e27b3986b159c5260ee961be043b95823d3a75eb7f2"} Dec 05 14:11:30 crc kubenswrapper[4784]: I1205 14:11:30.315695 4784 scope.go:117] "RemoveContainer" containerID="839c4e0f7c807db9b0c24926cf01651e85fd1b9bd288c1d8fa7731c0ce087aa7" Dec 05 14:11:30 crc kubenswrapper[4784]: I1205 14:11:30.316382 4784 scope.go:117] "RemoveContainer" containerID="30bb470db62f03f9ad108e27b3986b159c5260ee961be043b95823d3a75eb7f2" Dec 05 14:11:30 crc kubenswrapper[4784]: E1205 14:11:30.316710 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 14:11:43 crc kubenswrapper[4784]: I1205 14:11:43.002065 4784 scope.go:117] "RemoveContainer" containerID="30bb470db62f03f9ad108e27b3986b159c5260ee961be043b95823d3a75eb7f2" Dec 05 14:11:43 crc kubenswrapper[4784]: E1205 14:11:43.002954 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 14:11:47 crc kubenswrapper[4784]: I1205 14:11:47.483844 4784 generic.go:334] "Generic (PLEG): container finished" podID="f44fc51c-8407-4f44-9d3f-7b35fb235884" containerID="6d02705b191727c274813069c85dd7ffffb292180d2aed78f87932b9648ab2fe" exitCode=0 Dec 05 14:11:47 crc kubenswrapper[4784]: I1205 14:11:47.483916 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-5w2dk/crc-debug-2pnxt" event={"ID":"f44fc51c-8407-4f44-9d3f-7b35fb235884","Type":"ContainerDied","Data":"6d02705b191727c274813069c85dd7ffffb292180d2aed78f87932b9648ab2fe"} Dec 05 14:11:48 crc kubenswrapper[4784]: I1205 14:11:48.614500 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-5w2dk/crc-debug-2pnxt" Dec 05 14:11:48 crc kubenswrapper[4784]: I1205 14:11:48.646147 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-5w2dk/crc-debug-2pnxt"] Dec 05 14:11:48 crc kubenswrapper[4784]: I1205 14:11:48.658654 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-5w2dk/crc-debug-2pnxt"] Dec 05 14:11:48 crc kubenswrapper[4784]: I1205 14:11:48.685902 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dg2jf\" (UniqueName: \"kubernetes.io/projected/f44fc51c-8407-4f44-9d3f-7b35fb235884-kube-api-access-dg2jf\") pod \"f44fc51c-8407-4f44-9d3f-7b35fb235884\" (UID: \"f44fc51c-8407-4f44-9d3f-7b35fb235884\") " Dec 05 14:11:48 crc kubenswrapper[4784]: I1205 14:11:48.686023 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f44fc51c-8407-4f44-9d3f-7b35fb235884-host\") pod \"f44fc51c-8407-4f44-9d3f-7b35fb235884\" (UID: \"f44fc51c-8407-4f44-9d3f-7b35fb235884\") " Dec 05 14:11:48 crc kubenswrapper[4784]: I1205 14:11:48.686245 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f44fc51c-8407-4f44-9d3f-7b35fb235884-host" (OuterVolumeSpecName: "host") pod "f44fc51c-8407-4f44-9d3f-7b35fb235884" (UID: "f44fc51c-8407-4f44-9d3f-7b35fb235884"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 14:11:48 crc kubenswrapper[4784]: I1205 14:11:48.686962 4784 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f44fc51c-8407-4f44-9d3f-7b35fb235884-host\") on node \"crc\" DevicePath \"\"" Dec 05 14:11:48 crc kubenswrapper[4784]: I1205 14:11:48.697402 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f44fc51c-8407-4f44-9d3f-7b35fb235884-kube-api-access-dg2jf" (OuterVolumeSpecName: "kube-api-access-dg2jf") pod "f44fc51c-8407-4f44-9d3f-7b35fb235884" (UID: "f44fc51c-8407-4f44-9d3f-7b35fb235884"). InnerVolumeSpecName "kube-api-access-dg2jf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:11:48 crc kubenswrapper[4784]: I1205 14:11:48.789119 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dg2jf\" (UniqueName: \"kubernetes.io/projected/f44fc51c-8407-4f44-9d3f-7b35fb235884-kube-api-access-dg2jf\") on node \"crc\" DevicePath \"\"" Dec 05 14:11:49 crc kubenswrapper[4784]: I1205 14:11:49.013282 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f44fc51c-8407-4f44-9d3f-7b35fb235884" path="/var/lib/kubelet/pods/f44fc51c-8407-4f44-9d3f-7b35fb235884/volumes" Dec 05 14:11:49 crc kubenswrapper[4784]: I1205 14:11:49.505022 4784 scope.go:117] "RemoveContainer" containerID="6d02705b191727c274813069c85dd7ffffb292180d2aed78f87932b9648ab2fe" Dec 05 14:11:49 crc kubenswrapper[4784]: I1205 14:11:49.505169 4784 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-5w2dk/crc-debug-2pnxt" Dec 05 14:11:49 crc kubenswrapper[4784]: I1205 14:11:49.859000 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-5w2dk/crc-debug-96qfx"] Dec 05 14:11:49 crc kubenswrapper[4784]: E1205 14:11:49.862818 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f44fc51c-8407-4f44-9d3f-7b35fb235884" containerName="container-00" Dec 05 14:11:49 crc kubenswrapper[4784]: I1205 14:11:49.862849 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="f44fc51c-8407-4f44-9d3f-7b35fb235884" containerName="container-00" Dec 05 14:11:49 crc kubenswrapper[4784]: I1205 14:11:49.863070 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="f44fc51c-8407-4f44-9d3f-7b35fb235884" containerName="container-00" Dec 05 14:11:49 crc kubenswrapper[4784]: I1205 14:11:49.863902 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-5w2dk/crc-debug-96qfx" Dec 05 14:11:49 crc kubenswrapper[4784]: I1205 14:11:49.867936 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-5w2dk"/"default-dockercfg-nhxxm" Dec 05 14:11:50 crc kubenswrapper[4784]: I1205 14:11:50.021402 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/20dfa470-4c55-40f5-aa6c-482b9a04b07c-host\") pod \"crc-debug-96qfx\" (UID: \"20dfa470-4c55-40f5-aa6c-482b9a04b07c\") " pod="openshift-must-gather-5w2dk/crc-debug-96qfx" Dec 05 14:11:50 crc kubenswrapper[4784]: I1205 14:11:50.021792 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jqj27\" (UniqueName: \"kubernetes.io/projected/20dfa470-4c55-40f5-aa6c-482b9a04b07c-kube-api-access-jqj27\") pod \"crc-debug-96qfx\" (UID: \"20dfa470-4c55-40f5-aa6c-482b9a04b07c\") " pod="openshift-must-gather-5w2dk/crc-debug-96qfx" Dec 05 14:11:50 crc kubenswrapper[4784]: I1205 14:11:50.124041 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jqj27\" (UniqueName: \"kubernetes.io/projected/20dfa470-4c55-40f5-aa6c-482b9a04b07c-kube-api-access-jqj27\") pod \"crc-debug-96qfx\" (UID: \"20dfa470-4c55-40f5-aa6c-482b9a04b07c\") " pod="openshift-must-gather-5w2dk/crc-debug-96qfx" Dec 05 14:11:50 crc kubenswrapper[4784]: I1205 14:11:50.124214 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/20dfa470-4c55-40f5-aa6c-482b9a04b07c-host\") pod \"crc-debug-96qfx\" (UID: \"20dfa470-4c55-40f5-aa6c-482b9a04b07c\") " pod="openshift-must-gather-5w2dk/crc-debug-96qfx" Dec 05 14:11:50 crc kubenswrapper[4784]: I1205 14:11:50.124403 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/20dfa470-4c55-40f5-aa6c-482b9a04b07c-host\") pod \"crc-debug-96qfx\" (UID: \"20dfa470-4c55-40f5-aa6c-482b9a04b07c\") " pod="openshift-must-gather-5w2dk/crc-debug-96qfx" Dec 05 14:11:50 crc kubenswrapper[4784]: I1205 14:11:50.145061 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jqj27\" (UniqueName: \"kubernetes.io/projected/20dfa470-4c55-40f5-aa6c-482b9a04b07c-kube-api-access-jqj27\") pod \"crc-debug-96qfx\" (UID: \"20dfa470-4c55-40f5-aa6c-482b9a04b07c\") " pod="openshift-must-gather-5w2dk/crc-debug-96qfx" Dec 05 14:11:50 crc kubenswrapper[4784]: I1205 
14:11:50.187170 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-5w2dk/crc-debug-96qfx" Dec 05 14:11:50 crc kubenswrapper[4784]: I1205 14:11:50.514829 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-5w2dk/crc-debug-96qfx" event={"ID":"20dfa470-4c55-40f5-aa6c-482b9a04b07c","Type":"ContainerStarted","Data":"823dd0b7d810f5cf6155288cb86ba47e939abad6dd9bf6b3fa0cfa792dade736"} Dec 05 14:11:50 crc kubenswrapper[4784]: I1205 14:11:50.515135 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-5w2dk/crc-debug-96qfx" event={"ID":"20dfa470-4c55-40f5-aa6c-482b9a04b07c","Type":"ContainerStarted","Data":"e1efa0f2c588da97b7afcf99c5a060c1f5de76b94be9277faf6ab5c5338dd73a"} Dec 05 14:11:50 crc kubenswrapper[4784]: I1205 14:11:50.536821 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-5w2dk/crc-debug-96qfx" podStartSLOduration=1.536798608 podStartE2EDuration="1.536798608s" podCreationTimestamp="2025-12-05 14:11:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 14:11:50.52722633 +0000 UTC m=+6389.947293145" watchObservedRunningTime="2025-12-05 14:11:50.536798608 +0000 UTC m=+6389.956865423" Dec 05 14:11:51 crc kubenswrapper[4784]: I1205 14:11:51.532617 4784 generic.go:334] "Generic (PLEG): container finished" podID="20dfa470-4c55-40f5-aa6c-482b9a04b07c" containerID="823dd0b7d810f5cf6155288cb86ba47e939abad6dd9bf6b3fa0cfa792dade736" exitCode=0 Dec 05 14:11:51 crc kubenswrapper[4784]: I1205 14:11:51.532876 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-5w2dk/crc-debug-96qfx" event={"ID":"20dfa470-4c55-40f5-aa6c-482b9a04b07c","Type":"ContainerDied","Data":"823dd0b7d810f5cf6155288cb86ba47e939abad6dd9bf6b3fa0cfa792dade736"} Dec 05 14:11:52 crc kubenswrapper[4784]: I1205 14:11:52.697367 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-5w2dk/crc-debug-96qfx" Dec 05 14:11:52 crc kubenswrapper[4784]: I1205 14:11:52.775916 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jqj27\" (UniqueName: \"kubernetes.io/projected/20dfa470-4c55-40f5-aa6c-482b9a04b07c-kube-api-access-jqj27\") pod \"20dfa470-4c55-40f5-aa6c-482b9a04b07c\" (UID: \"20dfa470-4c55-40f5-aa6c-482b9a04b07c\") " Dec 05 14:11:52 crc kubenswrapper[4784]: I1205 14:11:52.775976 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/20dfa470-4c55-40f5-aa6c-482b9a04b07c-host\") pod \"20dfa470-4c55-40f5-aa6c-482b9a04b07c\" (UID: \"20dfa470-4c55-40f5-aa6c-482b9a04b07c\") " Dec 05 14:11:52 crc kubenswrapper[4784]: I1205 14:11:52.776950 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/20dfa470-4c55-40f5-aa6c-482b9a04b07c-host" (OuterVolumeSpecName: "host") pod "20dfa470-4c55-40f5-aa6c-482b9a04b07c" (UID: "20dfa470-4c55-40f5-aa6c-482b9a04b07c"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 14:11:52 crc kubenswrapper[4784]: I1205 14:11:52.786427 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20dfa470-4c55-40f5-aa6c-482b9a04b07c-kube-api-access-jqj27" (OuterVolumeSpecName: "kube-api-access-jqj27") pod "20dfa470-4c55-40f5-aa6c-482b9a04b07c" (UID: "20dfa470-4c55-40f5-aa6c-482b9a04b07c"). InnerVolumeSpecName "kube-api-access-jqj27". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:11:52 crc kubenswrapper[4784]: I1205 14:11:52.878833 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jqj27\" (UniqueName: \"kubernetes.io/projected/20dfa470-4c55-40f5-aa6c-482b9a04b07c-kube-api-access-jqj27\") on node \"crc\" DevicePath \"\"" Dec 05 14:11:52 crc kubenswrapper[4784]: I1205 14:11:52.878867 4784 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/20dfa470-4c55-40f5-aa6c-482b9a04b07c-host\") on node \"crc\" DevicePath \"\"" Dec 05 14:11:53 crc kubenswrapper[4784]: I1205 14:11:53.024083 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-5w2dk/crc-debug-96qfx"] Dec 05 14:11:53 crc kubenswrapper[4784]: I1205 14:11:53.039966 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-5w2dk/crc-debug-96qfx"] Dec 05 14:11:53 crc kubenswrapper[4784]: I1205 14:11:53.558115 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-5w2dk/crc-debug-96qfx" Dec 05 14:11:53 crc kubenswrapper[4784]: I1205 14:11:53.558995 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e1efa0f2c588da97b7afcf99c5a060c1f5de76b94be9277faf6ab5c5338dd73a" Dec 05 14:11:54 crc kubenswrapper[4784]: I1205 14:11:54.208322 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-5w2dk/crc-debug-btz7r"] Dec 05 14:11:54 crc kubenswrapper[4784]: E1205 14:11:54.209074 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20dfa470-4c55-40f5-aa6c-482b9a04b07c" containerName="container-00" Dec 05 14:11:54 crc kubenswrapper[4784]: I1205 14:11:54.209086 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="20dfa470-4c55-40f5-aa6c-482b9a04b07c" containerName="container-00" Dec 05 14:11:54 crc kubenswrapper[4784]: I1205 14:11:54.209341 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="20dfa470-4c55-40f5-aa6c-482b9a04b07c" containerName="container-00" Dec 05 14:11:54 crc kubenswrapper[4784]: I1205 14:11:54.210000 4784 util.go:30] "No sandbox for pod can be found. 
Dec 05 14:11:54 crc kubenswrapper[4784]: I1205 14:11:54.212044 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-5w2dk"/"default-dockercfg-nhxxm"
Dec 05 14:11:54 crc kubenswrapper[4784]: I1205 14:11:54.332819 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5wsn\" (UniqueName: \"kubernetes.io/projected/26dba194-96b9-4bea-b94f-0b917a324310-kube-api-access-c5wsn\") pod \"crc-debug-btz7r\" (UID: \"26dba194-96b9-4bea-b94f-0b917a324310\") " pod="openshift-must-gather-5w2dk/crc-debug-btz7r"
Dec 05 14:11:54 crc kubenswrapper[4784]: I1205 14:11:54.332898 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/26dba194-96b9-4bea-b94f-0b917a324310-host\") pod \"crc-debug-btz7r\" (UID: \"26dba194-96b9-4bea-b94f-0b917a324310\") " pod="openshift-must-gather-5w2dk/crc-debug-btz7r"
Dec 05 14:11:54 crc kubenswrapper[4784]: I1205 14:11:54.434480 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5wsn\" (UniqueName: \"kubernetes.io/projected/26dba194-96b9-4bea-b94f-0b917a324310-kube-api-access-c5wsn\") pod \"crc-debug-btz7r\" (UID: \"26dba194-96b9-4bea-b94f-0b917a324310\") " pod="openshift-must-gather-5w2dk/crc-debug-btz7r"
Dec 05 14:11:54 crc kubenswrapper[4784]: I1205 14:11:54.434565 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/26dba194-96b9-4bea-b94f-0b917a324310-host\") pod \"crc-debug-btz7r\" (UID: \"26dba194-96b9-4bea-b94f-0b917a324310\") " pod="openshift-must-gather-5w2dk/crc-debug-btz7r"
Dec 05 14:11:54 crc kubenswrapper[4784]: I1205 14:11:54.434705 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/26dba194-96b9-4bea-b94f-0b917a324310-host\") pod \"crc-debug-btz7r\" (UID: \"26dba194-96b9-4bea-b94f-0b917a324310\") " pod="openshift-must-gather-5w2dk/crc-debug-btz7r"
Dec 05 14:11:54 crc kubenswrapper[4784]: I1205 14:11:54.455009 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5wsn\" (UniqueName: \"kubernetes.io/projected/26dba194-96b9-4bea-b94f-0b917a324310-kube-api-access-c5wsn\") pod \"crc-debug-btz7r\" (UID: \"26dba194-96b9-4bea-b94f-0b917a324310\") " pod="openshift-must-gather-5w2dk/crc-debug-btz7r"
Dec 05 14:11:54 crc kubenswrapper[4784]: I1205 14:11:54.528412 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-5w2dk/crc-debug-btz7r"
Dec 05 14:11:54 crc kubenswrapper[4784]: W1205 14:11:54.583278 4784 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod26dba194_96b9_4bea_b94f_0b917a324310.slice/crio-f6bdcc4d245777360b69bfe5daabc1f99e0acb6a7c831d99b2eb13ba5abe4de4 WatchSource:0}: Error finding container f6bdcc4d245777360b69bfe5daabc1f99e0acb6a7c831d99b2eb13ba5abe4de4: Status 404 returned error can't find the container with id f6bdcc4d245777360b69bfe5daabc1f99e0acb6a7c831d99b2eb13ba5abe4de4
Dec 05 14:11:55 crc kubenswrapper[4784]: I1205 14:11:55.011434 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20dfa470-4c55-40f5-aa6c-482b9a04b07c" path="/var/lib/kubelet/pods/20dfa470-4c55-40f5-aa6c-482b9a04b07c/volumes"
Dec 05 14:11:55 crc kubenswrapper[4784]: I1205 14:11:55.592282 4784 generic.go:334] "Generic (PLEG): container finished" podID="26dba194-96b9-4bea-b94f-0b917a324310" containerID="dc2a172e92516be7a96e437cfc98bc70fb5bd4093670c3b6b03eb8836d8927f5" exitCode=0
Dec 05 14:11:55 crc kubenswrapper[4784]: I1205 14:11:55.592323 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-5w2dk/crc-debug-btz7r" event={"ID":"26dba194-96b9-4bea-b94f-0b917a324310","Type":"ContainerDied","Data":"dc2a172e92516be7a96e437cfc98bc70fb5bd4093670c3b6b03eb8836d8927f5"}
Dec 05 14:11:55 crc kubenswrapper[4784]: I1205 14:11:55.592350 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-5w2dk/crc-debug-btz7r" event={"ID":"26dba194-96b9-4bea-b94f-0b917a324310","Type":"ContainerStarted","Data":"f6bdcc4d245777360b69bfe5daabc1f99e0acb6a7c831d99b2eb13ba5abe4de4"}
Dec 05 14:11:55 crc kubenswrapper[4784]: I1205 14:11:55.635861 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-5w2dk/crc-debug-btz7r"]
Dec 05 14:11:55 crc kubenswrapper[4784]: I1205 14:11:55.668817 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-5w2dk/crc-debug-btz7r"]
Dec 05 14:11:55 crc kubenswrapper[4784]: I1205 14:11:55.998663 4784 scope.go:117] "RemoveContainer" containerID="30bb470db62f03f9ad108e27b3986b159c5260ee961be043b95823d3a75eb7f2"
Dec 05 14:11:55 crc kubenswrapper[4784]: E1205 14:11:55.999211 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 14:11:56 crc kubenswrapper[4784]: I1205 14:11:56.722123 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-5w2dk/crc-debug-btz7r"
Dec 05 14:11:56 crc kubenswrapper[4784]: I1205 14:11:56.886948 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c5wsn\" (UniqueName: \"kubernetes.io/projected/26dba194-96b9-4bea-b94f-0b917a324310-kube-api-access-c5wsn\") pod \"26dba194-96b9-4bea-b94f-0b917a324310\" (UID: \"26dba194-96b9-4bea-b94f-0b917a324310\") "
Dec 05 14:11:56 crc kubenswrapper[4784]: I1205 14:11:56.887038 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/26dba194-96b9-4bea-b94f-0b917a324310-host\") pod \"26dba194-96b9-4bea-b94f-0b917a324310\" (UID: \"26dba194-96b9-4bea-b94f-0b917a324310\") "
Dec 05 14:11:56 crc kubenswrapper[4784]: I1205 14:11:56.887196 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/26dba194-96b9-4bea-b94f-0b917a324310-host" (OuterVolumeSpecName: "host") pod "26dba194-96b9-4bea-b94f-0b917a324310" (UID: "26dba194-96b9-4bea-b94f-0b917a324310"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 14:11:56 crc kubenswrapper[4784]: I1205 14:11:56.887711 4784 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/26dba194-96b9-4bea-b94f-0b917a324310-host\") on node \"crc\" DevicePath \"\""
Dec 05 14:11:56 crc kubenswrapper[4784]: I1205 14:11:56.899564 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/26dba194-96b9-4bea-b94f-0b917a324310-kube-api-access-c5wsn" (OuterVolumeSpecName: "kube-api-access-c5wsn") pod "26dba194-96b9-4bea-b94f-0b917a324310" (UID: "26dba194-96b9-4bea-b94f-0b917a324310"). InnerVolumeSpecName "kube-api-access-c5wsn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 14:11:56 crc kubenswrapper[4784]: I1205 14:11:56.989322 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c5wsn\" (UniqueName: \"kubernetes.io/projected/26dba194-96b9-4bea-b94f-0b917a324310-kube-api-access-c5wsn\") on node \"crc\" DevicePath \"\""
Dec 05 14:11:57 crc kubenswrapper[4784]: I1205 14:11:57.009367 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="26dba194-96b9-4bea-b94f-0b917a324310" path="/var/lib/kubelet/pods/26dba194-96b9-4bea-b94f-0b917a324310/volumes"
Dec 05 14:11:57 crc kubenswrapper[4784]: I1205 14:11:57.615140 4784 scope.go:117] "RemoveContainer" containerID="dc2a172e92516be7a96e437cfc98bc70fb5bd4093670c3b6b03eb8836d8927f5"
Dec 05 14:11:57 crc kubenswrapper[4784]: I1205 14:11:57.615472 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-5w2dk/crc-debug-btz7r"
Dec 05 14:12:11 crc kubenswrapper[4784]: I1205 14:12:10.999894 4784 scope.go:117] "RemoveContainer" containerID="30bb470db62f03f9ad108e27b3986b159c5260ee961be043b95823d3a75eb7f2"
Dec 05 14:12:11 crc kubenswrapper[4784]: E1205 14:12:11.000864 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 14:12:21 crc kubenswrapper[4784]: I1205 14:12:21.999165 4784 scope.go:117] "RemoveContainer" containerID="30bb470db62f03f9ad108e27b3986b159c5260ee961be043b95823d3a75eb7f2"
Dec 05 14:12:22 crc kubenswrapper[4784]: E1205 14:12:22.001920 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 14:12:31 crc kubenswrapper[4784]: I1205 14:12:31.182471 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-7b64cff454-t47d4_20cab8a1-167e-4a61-9de1-dbca99fc6978/barbican-api/0.log"
Dec 05 14:12:31 crc kubenswrapper[4784]: I1205 14:12:31.246087 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-7b64cff454-t47d4_20cab8a1-167e-4a61-9de1-dbca99fc6978/barbican-api-log/0.log"
Dec 05 14:12:31 crc kubenswrapper[4784]: I1205 14:12:31.362736 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-5866d77f58-dcc8l_d1f830fd-3c91-4985-ac6f-96314a74acc1/barbican-keystone-listener/0.log"
Dec 05 14:12:31 crc kubenswrapper[4784]: I1205 14:12:31.428016 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-5866d77f58-dcc8l_d1f830fd-3c91-4985-ac6f-96314a74acc1/barbican-keystone-listener-log/0.log"
Dec 05 14:12:31 crc kubenswrapper[4784]: I1205 14:12:31.570864 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-655f48fc8f-k55s9_b7fd6fd1-2f61-44a4-b8b7-9f1c38768db2/barbican-worker/0.log"
Dec 05 14:12:31 crc kubenswrapper[4784]: I1205 14:12:31.626062 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-655f48fc8f-k55s9_b7fd6fd1-2f61-44a4-b8b7-9f1c38768db2/barbican-worker-log/0.log"
Dec 05 14:12:31 crc kubenswrapper[4784]: I1205 14:12:31.769297 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-kphkn_345a4940-4998-4cbc-bd5e-89bea1eec60b/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 05 14:12:31 crc kubenswrapper[4784]: I1205 14:12:31.924311 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_435a1cb2-bd53-4b3f-906d-7fc3de9553fb/ceilometer-central-agent/0.log"
Dec 05 14:12:32 crc kubenswrapper[4784]: I1205 14:12:32.040738 4784 log.go:25] "Finished parsing log file"
path="/var/log/pods/openstack_ceilometer-0_435a1cb2-bd53-4b3f-906d-7fc3de9553fb/ceilometer-notification-agent/0.log" Dec 05 14:12:32 crc kubenswrapper[4784]: I1205 14:12:32.041971 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_435a1cb2-bd53-4b3f-906d-7fc3de9553fb/proxy-httpd/0.log" Dec 05 14:12:32 crc kubenswrapper[4784]: I1205 14:12:32.064587 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_435a1cb2-bd53-4b3f-906d-7fc3de9553fb/sg-core/0.log" Dec 05 14:12:32 crc kubenswrapper[4784]: I1205 14:12:32.276049 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_630347d3-27c8-4ef8-8bc4-f06ff57474ed/cinder-api-log/0.log" Dec 05 14:12:32 crc kubenswrapper[4784]: I1205 14:12:32.611398 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_9db8a5e9-71b5-49aa-a45d-1361d3a021c9/probe/0.log" Dec 05 14:12:32 crc kubenswrapper[4784]: I1205 14:12:32.844312 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_9db8a5e9-71b5-49aa-a45d-1361d3a021c9/cinder-backup/0.log" Dec 05 14:12:32 crc kubenswrapper[4784]: I1205 14:12:32.871889 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_7543eb24-2b15-498b-b447-9f1f47fef1f0/cinder-scheduler/0.log" Dec 05 14:12:32 crc kubenswrapper[4784]: I1205 14:12:32.926755 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_7543eb24-2b15-498b-b447-9f1f47fef1f0/probe/0.log" Dec 05 14:12:32 crc kubenswrapper[4784]: I1205 14:12:32.943292 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_630347d3-27c8-4ef8-8bc4-f06ff57474ed/cinder-api/0.log" Dec 05 14:12:33 crc kubenswrapper[4784]: I1205 14:12:33.201252 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-nfs-0_3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed/probe/0.log" Dec 05 14:12:33 crc kubenswrapper[4784]: I1205 14:12:33.307237 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-nfs-0_3d4dfc19-7b89-4ef4-9ead-18c78bbf69ed/cinder-volume/0.log" Dec 05 14:12:33 crc kubenswrapper[4784]: I1205 14:12:33.512167 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-nfs-2-0_60971449-2443-4cba-90d2-7d1c6ba8acdd/probe/0.log" Dec 05 14:12:33 crc kubenswrapper[4784]: I1205 14:12:33.520270 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-nfs-2-0_60971449-2443-4cba-90d2-7d1c6ba8acdd/cinder-volume/0.log" Dec 05 14:12:33 crc kubenswrapper[4784]: I1205 14:12:33.558128 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-mh867_face2e9b-424c-4b68-8b2d-8f00b1e79256/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 14:12:33 crc kubenswrapper[4784]: I1205 14:12:33.717876 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-m5gm5_b0194359-b6ce-4590-b835-c81b0c992ca1/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 14:12:33 crc kubenswrapper[4784]: I1205 14:12:33.776374 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5fb487c899-nw4wf_4d1b8599-38bc-4f76-aaa6-4a18929bffba/init/0.log" Dec 05 14:12:33 crc kubenswrapper[4784]: I1205 14:12:33.983370 4784 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_dnsmasq-dns-5fb487c899-nw4wf_4d1b8599-38bc-4f76-aaa6-4a18929bffba/init/0.log" Dec 05 14:12:34 crc kubenswrapper[4784]: I1205 14:12:34.070260 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-tx7xw_a134d50c-87cd-4225-b873-1c6b1d2a0151/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 14:12:34 crc kubenswrapper[4784]: I1205 14:12:34.146530 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5fb487c899-nw4wf_4d1b8599-38bc-4f76-aaa6-4a18929bffba/dnsmasq-dns/0.log" Dec 05 14:12:34 crc kubenswrapper[4784]: I1205 14:12:34.263396 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_60863f45-1c7b-4e86-8782-aece4b178edb/glance-log/0.log" Dec 05 14:12:34 crc kubenswrapper[4784]: I1205 14:12:34.284060 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_60863f45-1c7b-4e86-8782-aece4b178edb/glance-httpd/0.log" Dec 05 14:12:34 crc kubenswrapper[4784]: I1205 14:12:34.465946 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_4f0ebe85-0cf4-4cbf-9b72-1561ca313666/glance-httpd/0.log" Dec 05 14:12:34 crc kubenswrapper[4784]: I1205 14:12:34.487673 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_4f0ebe85-0cf4-4cbf-9b72-1561ca313666/glance-log/0.log" Dec 05 14:12:34 crc kubenswrapper[4784]: I1205 14:12:34.651125 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-7cdb6b7d4-mvtql_5eba67d7-3c83-47c9-bdc2-0946f5839efd/horizon/0.log" Dec 05 14:12:34 crc kubenswrapper[4784]: I1205 14:12:34.736424 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-z4gv5_8021b39a-1235-4fb9-8ef4-ae1ff51e7835/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 14:12:34 crc kubenswrapper[4784]: I1205 14:12:34.888302 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-kphz8_db4f2cc1-d1a2-42af-a45b-04e866b92d97/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 14:12:34 crc kubenswrapper[4784]: I1205 14:12:34.999291 4784 scope.go:117] "RemoveContainer" containerID="30bb470db62f03f9ad108e27b3986b159c5260ee961be043b95823d3a75eb7f2" Dec 05 14:12:34 crc kubenswrapper[4784]: E1205 14:12:34.999590 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 14:12:35 crc kubenswrapper[4784]: I1205 14:12:35.255823 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29415661-ckq8x_0090d182-b58b-4c0b-83b0-82ce94675e65/keystone-cron/0.log" Dec 05 14:12:35 crc kubenswrapper[4784]: I1205 14:12:35.487680 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29415721-9jftk_22fe4207-5d79-42c8-b6fc-b0a0539d17bf/keystone-cron/0.log" Dec 05 14:12:35 crc kubenswrapper[4784]: I1205 14:12:35.673904 4784 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_horizon-7cdb6b7d4-mvtql_5eba67d7-3c83-47c9-bdc2-0946f5839efd/horizon-log/0.log" Dec 05 14:12:35 crc kubenswrapper[4784]: I1205 14:12:35.692019 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-6df6c9b849-hzswf_74bc0f27-17f2-4980-8c67-3a980c2e267d/keystone-api/0.log" Dec 05 14:12:35 crc kubenswrapper[4784]: I1205 14:12:35.719106 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_20314f76-fd12-4756-96b9-88485d32d3e0/kube-state-metrics/0.log" Dec 05 14:12:35 crc kubenswrapper[4784]: I1205 14:12:35.788618 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-r4w6d_ba1cfa9d-6665-4a66-a134-28fae26e36a2/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 14:12:36 crc kubenswrapper[4784]: I1205 14:12:36.218503 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-k4qts_ab97b4b4-1696-43ea-b462-56bcd34dda98/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 14:12:36 crc kubenswrapper[4784]: I1205 14:12:36.275219 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6bc87b8895-m5b7r_c4c57012-5781-4940-9551-6a53e2f9fad3/neutron-httpd/0.log" Dec 05 14:12:36 crc kubenswrapper[4784]: I1205 14:12:36.389571 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6bc87b8895-m5b7r_c4c57012-5781-4940-9551-6a53e2f9fad3/neutron-api/0.log" Dec 05 14:12:37 crc kubenswrapper[4784]: I1205 14:12:37.014927 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_306ca66e-9ffa-49fd-b2ad-1021c24fa070/nova-cell0-conductor-conductor/0.log" Dec 05 14:12:37 crc kubenswrapper[4784]: I1205 14:12:37.315042 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_2f743e41-0208-45ae-940c-104f0c9442ba/nova-cell1-conductor-conductor/0.log" Dec 05 14:12:37 crc kubenswrapper[4784]: I1205 14:12:37.617650 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_8b5aa1c7-bf1d-49a6-ad9a-739c5dac7550/nova-cell1-novncproxy-novncproxy/0.log" Dec 05 14:12:37 crc kubenswrapper[4784]: I1205 14:12:37.865172 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_e693ebe6-ec98-4906-9a85-25a5a8a3c871/nova-api-log/0.log" Dec 05 14:12:37 crc kubenswrapper[4784]: I1205 14:12:37.901268 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-m6kfz_73ebab61-4062-476d-84bc-1013b097d5ac/nova-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 14:12:38 crc kubenswrapper[4784]: I1205 14:12:38.162424 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_e099c7b6-c61f-4426-a17a-ca13ca695a1e/nova-metadata-log/0.log" Dec 05 14:12:38 crc kubenswrapper[4784]: I1205 14:12:38.305094 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_e693ebe6-ec98-4906-9a85-25a5a8a3c871/nova-api-api/0.log" Dec 05 14:12:38 crc kubenswrapper[4784]: I1205 14:12:38.536448 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_62eaeb31-76a0-4f2b-9bbe-b00f25a620e3/mysql-bootstrap/0.log" Dec 05 14:12:38 crc kubenswrapper[4784]: I1205 14:12:38.628232 4784 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_openstack-cell1-galera-0_62eaeb31-76a0-4f2b-9bbe-b00f25a620e3/mysql-bootstrap/0.log" Dec 05 14:12:38 crc kubenswrapper[4784]: I1205 14:12:38.750032 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_df7dbd5c-eb86-4431-8cdd-59b57dcfc381/nova-scheduler-scheduler/0.log" Dec 05 14:12:38 crc kubenswrapper[4784]: I1205 14:12:38.783315 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_62eaeb31-76a0-4f2b-9bbe-b00f25a620e3/galera/0.log" Dec 05 14:12:38 crc kubenswrapper[4784]: I1205 14:12:38.993449 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_2142f1ca-e4be-48fc-94b9-12d5f7737366/mysql-bootstrap/0.log" Dec 05 14:12:39 crc kubenswrapper[4784]: I1205 14:12:39.182050 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_2142f1ca-e4be-48fc-94b9-12d5f7737366/mysql-bootstrap/0.log" Dec 05 14:12:39 crc kubenswrapper[4784]: I1205 14:12:39.194514 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_2142f1ca-e4be-48fc-94b9-12d5f7737366/galera/0.log" Dec 05 14:12:39 crc kubenswrapper[4784]: I1205 14:12:39.398879 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_a90b38e9-d09e-4f72-9d73-85c2226e4049/openstackclient/0.log" Dec 05 14:12:39 crc kubenswrapper[4784]: I1205 14:12:39.449537 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ljc2l_938888bc-6cef-410e-b517-9fdb0c824405/ovn-controller/0.log" Dec 05 14:12:39 crc kubenswrapper[4784]: I1205 14:12:39.685258 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-sjfhv_646c01bd-0f76-4fbc-aae4-9d679cde5796/openstack-network-exporter/0.log" Dec 05 14:12:39 crc kubenswrapper[4784]: I1205 14:12:39.828799 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-6s6n5_8762021a-ee57-4a56-b752-da1d808ca0ff/ovsdb-server-init/0.log" Dec 05 14:12:40 crc kubenswrapper[4784]: I1205 14:12:40.074897 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-6s6n5_8762021a-ee57-4a56-b752-da1d808ca0ff/ovsdb-server-init/0.log" Dec 05 14:12:40 crc kubenswrapper[4784]: I1205 14:12:40.075119 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-6s6n5_8762021a-ee57-4a56-b752-da1d808ca0ff/ovsdb-server/0.log" Dec 05 14:12:40 crc kubenswrapper[4784]: I1205 14:12:40.328407 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-qhm79_8014c4e6-3539-4d7f-95c3-bb37c4a1e08e/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 14:12:40 crc kubenswrapper[4784]: I1205 14:12:40.502284 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_961dcf80-20f3-48f5-818b-2c497ce58e01/openstack-network-exporter/0.log" Dec 05 14:12:40 crc kubenswrapper[4784]: I1205 14:12:40.509817 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-6s6n5_8762021a-ee57-4a56-b752-da1d808ca0ff/ovs-vswitchd/0.log" Dec 05 14:12:40 crc kubenswrapper[4784]: I1205 14:12:40.674720 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_961dcf80-20f3-48f5-818b-2c497ce58e01/ovn-northd/0.log" Dec 05 14:12:40 crc kubenswrapper[4784]: I1205 14:12:40.734701 4784 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovsdbserver-nb-0_3af95a38-f2ad-44f7-a99d-77d48faa79f8/openstack-network-exporter/0.log" Dec 05 14:12:40 crc kubenswrapper[4784]: I1205 14:12:40.758297 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_e099c7b6-c61f-4426-a17a-ca13ca695a1e/nova-metadata-metadata/0.log" Dec 05 14:12:40 crc kubenswrapper[4784]: I1205 14:12:40.935437 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_de4ea412-229e-4e53-97ff-86a923c47aac/openstack-network-exporter/0.log" Dec 05 14:12:40 crc kubenswrapper[4784]: I1205 14:12:40.951704 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_3af95a38-f2ad-44f7-a99d-77d48faa79f8/ovsdbserver-nb/0.log" Dec 05 14:12:41 crc kubenswrapper[4784]: I1205 14:12:41.013447 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_de4ea412-229e-4e53-97ff-86a923c47aac/ovsdbserver-sb/0.log" Dec 05 14:12:41 crc kubenswrapper[4784]: I1205 14:12:41.390134 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-59c9dd888d-55zdv_e09b58a6-8baa-4c70-92dc-f54061239d1b/placement-api/0.log" Dec 05 14:12:41 crc kubenswrapper[4784]: I1205 14:12:41.446436 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_f331c141-708a-4f4a-b0fa-e2cfcb1a7bed/init-config-reloader/0.log" Dec 05 14:12:41 crc kubenswrapper[4784]: I1205 14:12:41.539553 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-59c9dd888d-55zdv_e09b58a6-8baa-4c70-92dc-f54061239d1b/placement-log/0.log" Dec 05 14:12:41 crc kubenswrapper[4784]: I1205 14:12:41.635482 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_f331c141-708a-4f4a-b0fa-e2cfcb1a7bed/config-reloader/0.log" Dec 05 14:12:41 crc kubenswrapper[4784]: I1205 14:12:41.637272 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_f331c141-708a-4f4a-b0fa-e2cfcb1a7bed/prometheus/0.log" Dec 05 14:12:41 crc kubenswrapper[4784]: I1205 14:12:41.642027 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_f331c141-708a-4f4a-b0fa-e2cfcb1a7bed/init-config-reloader/0.log" Dec 05 14:12:41 crc kubenswrapper[4784]: I1205 14:12:41.747856 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_f331c141-708a-4f4a-b0fa-e2cfcb1a7bed/thanos-sidecar/0.log" Dec 05 14:12:41 crc kubenswrapper[4784]: I1205 14:12:41.850144 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_94f3bf83-4b17-4dbc-aed9-b0541983c0b8/setup-container/0.log" Dec 05 14:12:42 crc kubenswrapper[4784]: I1205 14:12:42.064163 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_94f3bf83-4b17-4dbc-aed9-b0541983c0b8/setup-container/0.log" Dec 05 14:12:42 crc kubenswrapper[4784]: I1205 14:12:42.067063 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_94f3bf83-4b17-4dbc-aed9-b0541983c0b8/rabbitmq/0.log" Dec 05 14:12:42 crc kubenswrapper[4784]: I1205 14:12:42.163871 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-notifications-server-0_0a051f14-c8d2-4d57-95a9-9be7c46f9031/setup-container/0.log" Dec 05 14:12:42 crc kubenswrapper[4784]: I1205 14:12:42.371292 4784 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_rabbitmq-notifications-server-0_0a051f14-c8d2-4d57-95a9-9be7c46f9031/setup-container/0.log" Dec 05 14:12:42 crc kubenswrapper[4784]: I1205 14:12:42.387246 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-notifications-server-0_0a051f14-c8d2-4d57-95a9-9be7c46f9031/rabbitmq/0.log" Dec 05 14:12:42 crc kubenswrapper[4784]: I1205 14:12:42.475974 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_1a3bb70f-0aad-4f14-809e-1f39b78c97b8/setup-container/0.log" Dec 05 14:12:42 crc kubenswrapper[4784]: I1205 14:12:42.670714 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_1a3bb70f-0aad-4f14-809e-1f39b78c97b8/setup-container/0.log" Dec 05 14:12:42 crc kubenswrapper[4784]: I1205 14:12:42.727917 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_1a3bb70f-0aad-4f14-809e-1f39b78c97b8/rabbitmq/0.log" Dec 05 14:12:42 crc kubenswrapper[4784]: I1205 14:12:42.773675 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-dbnnx_5f5f986e-311c-41da-aae4-18d6f3520749/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 14:12:42 crc kubenswrapper[4784]: I1205 14:12:42.925395 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-djr9k_e26d5696-c749-46ed-9f75-f07d0c46c076/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 14:12:43 crc kubenswrapper[4784]: I1205 14:12:43.034681 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-dmmxx_ced78d37-0ef6-4a75-903d-7db8946f38f4/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 14:12:43 crc kubenswrapper[4784]: I1205 14:12:43.165966 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-rk64x_238cfc14-62ab-498c-acc8-ec79cea43fa8/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 14:12:43 crc kubenswrapper[4784]: I1205 14:12:43.344048 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-wzblc_304b4ee8-7619-47a1-970d-5fbeb6c24e96/ssh-known-hosts-edpm-deployment/0.log" Dec 05 14:12:43 crc kubenswrapper[4784]: I1205 14:12:43.560796 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-664869ddc-d4x9g_dbdfc62e-030d-47fb-bcd5-ea38da412eb6/proxy-server/0.log" Dec 05 14:12:43 crc kubenswrapper[4784]: I1205 14:12:43.673849 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-5js9g_de71f05a-e844-4d80-bd5b-2e4169a624c4/swift-ring-rebalance/0.log" Dec 05 14:12:43 crc kubenswrapper[4784]: I1205 14:12:43.838344 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f34e93a8-02d9-44ef-a18e-13ce24c3f9a6/account-auditor/0.log" Dec 05 14:12:43 crc kubenswrapper[4784]: I1205 14:12:43.869905 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f34e93a8-02d9-44ef-a18e-13ce24c3f9a6/account-reaper/0.log" Dec 05 14:12:44 crc kubenswrapper[4784]: I1205 14:12:44.037499 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f34e93a8-02d9-44ef-a18e-13ce24c3f9a6/account-server/0.log" Dec 05 14:12:44 crc kubenswrapper[4784]: I1205 14:12:44.067599 4784 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_f34e93a8-02d9-44ef-a18e-13ce24c3f9a6/container-auditor/0.log" Dec 05 14:12:44 crc kubenswrapper[4784]: I1205 14:12:44.229813 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f34e93a8-02d9-44ef-a18e-13ce24c3f9a6/account-replicator/0.log" Dec 05 14:12:44 crc kubenswrapper[4784]: I1205 14:12:44.246651 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f34e93a8-02d9-44ef-a18e-13ce24c3f9a6/container-server/0.log" Dec 05 14:12:44 crc kubenswrapper[4784]: I1205 14:12:44.273895 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f34e93a8-02d9-44ef-a18e-13ce24c3f9a6/container-replicator/0.log" Dec 05 14:12:44 crc kubenswrapper[4784]: I1205 14:12:44.301531 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-664869ddc-d4x9g_dbdfc62e-030d-47fb-bcd5-ea38da412eb6/proxy-httpd/0.log" Dec 05 14:12:44 crc kubenswrapper[4784]: I1205 14:12:44.464197 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f34e93a8-02d9-44ef-a18e-13ce24c3f9a6/object-expirer/0.log" Dec 05 14:12:44 crc kubenswrapper[4784]: I1205 14:12:44.483335 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f34e93a8-02d9-44ef-a18e-13ce24c3f9a6/container-updater/0.log" Dec 05 14:12:44 crc kubenswrapper[4784]: I1205 14:12:44.563045 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f34e93a8-02d9-44ef-a18e-13ce24c3f9a6/object-auditor/0.log" Dec 05 14:12:44 crc kubenswrapper[4784]: I1205 14:12:44.565099 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f34e93a8-02d9-44ef-a18e-13ce24c3f9a6/object-replicator/0.log" Dec 05 14:12:44 crc kubenswrapper[4784]: I1205 14:12:44.696910 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f34e93a8-02d9-44ef-a18e-13ce24c3f9a6/object-server/0.log" Dec 05 14:12:44 crc kubenswrapper[4784]: I1205 14:12:44.709840 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f34e93a8-02d9-44ef-a18e-13ce24c3f9a6/object-updater/0.log" Dec 05 14:12:44 crc kubenswrapper[4784]: I1205 14:12:44.755631 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f34e93a8-02d9-44ef-a18e-13ce24c3f9a6/swift-recon-cron/0.log" Dec 05 14:12:44 crc kubenswrapper[4784]: I1205 14:12:44.762157 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f34e93a8-02d9-44ef-a18e-13ce24c3f9a6/rsync/0.log" Dec 05 14:12:44 crc kubenswrapper[4784]: I1205 14:12:44.997397 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-q4cz2_3c4b4608-406d-431c-a042-bd54eb2643f9/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 14:12:45 crc kubenswrapper[4784]: I1205 14:12:45.048161 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_9dc746ad-99ec-4a42-8c05-3c45ece46906/tempest-tests-tempest-tests-runner/0.log" Dec 05 14:12:45 crc kubenswrapper[4784]: I1205 14:12:45.227328 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_a313521b-ac34-4e94-83a4-401c7e1acbbe/test-operator-logs-container/0.log" Dec 05 14:12:45 crc kubenswrapper[4784]: I1205 14:12:45.297827 4784 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-xdxzl_c4647929-264c-4fe3-b2ee-f543c25a50d0/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 14:12:46 crc kubenswrapper[4784]: I1205 14:12:46.181456 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-applier-0_e88e0cbd-da67-4123-97dd-6840f902d9f1/watcher-applier/0.log" Dec 05 14:12:46 crc kubenswrapper[4784]: I1205 14:12:46.998506 4784 scope.go:117] "RemoveContainer" containerID="30bb470db62f03f9ad108e27b3986b159c5260ee961be043b95823d3a75eb7f2" Dec 05 14:12:46 crc kubenswrapper[4784]: E1205 14:12:46.999054 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 14:12:47 crc kubenswrapper[4784]: I1205 14:12:47.028858 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-api-0_91f043b4-34b6-413d-b8d2-25a247639c63/watcher-api-log/0.log" Dec 05 14:12:49 crc kubenswrapper[4784]: I1205 14:12:49.541796 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-decision-engine-0_7dd4bdfd-f163-412e-81a1-ee0d8a2c6aa1/watcher-decision-engine/0.log" Dec 05 14:12:51 crc kubenswrapper[4784]: I1205 14:12:51.078549 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-api-0_91f043b4-34b6-413d-b8d2-25a247639c63/watcher-api/0.log" Dec 05 14:12:54 crc kubenswrapper[4784]: I1205 14:12:54.445250 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_a93486d9-05bf-44e5-9991-5ca89f117938/memcached/0.log" Dec 05 14:12:59 crc kubenswrapper[4784]: I1205 14:12:59.998956 4784 scope.go:117] "RemoveContainer" containerID="30bb470db62f03f9ad108e27b3986b159c5260ee961be043b95823d3a75eb7f2" Dec 05 14:13:00 crc kubenswrapper[4784]: E1205 14:12:59.999727 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 14:13:11 crc kubenswrapper[4784]: I1205 14:13:11.999383 4784 scope.go:117] "RemoveContainer" containerID="30bb470db62f03f9ad108e27b3986b159c5260ee961be043b95823d3a75eb7f2" Dec 05 14:13:12 crc kubenswrapper[4784]: E1205 14:13:12.000351 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 14:13:12 crc kubenswrapper[4784]: I1205 14:13:12.571575 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-8ct4c_5434c275-5acc-4ffe-94ff-1cd9440300b0/kube-rbac-proxy/0.log" Dec 05 14:13:12 crc 
kubenswrapper[4784]: I1205 14:13:12.676629 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-8ct4c_5434c275-5acc-4ffe-94ff-1cd9440300b0/manager/0.log" Dec 05 14:13:12 crc kubenswrapper[4784]: I1205 14:13:12.830790 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-grj7k_17a1e99d-2e27-47df-93be-afbb5224152b/kube-rbac-proxy/0.log" Dec 05 14:13:12 crc kubenswrapper[4784]: I1205 14:13:12.899893 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-grj7k_17a1e99d-2e27-47df-93be-afbb5224152b/manager/0.log" Dec 05 14:13:13 crc kubenswrapper[4784]: I1205 14:13:13.014821 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-g4cvf_cae1438b-c8fd-4660-8843-f41bca4b1e15/kube-rbac-proxy/0.log" Dec 05 14:13:13 crc kubenswrapper[4784]: I1205 14:13:13.077530 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-g4cvf_cae1438b-c8fd-4660-8843-f41bca4b1e15/manager/0.log" Dec 05 14:13:13 crc kubenswrapper[4784]: I1205 14:13:13.162630 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_f6f25c485af4fc88bfa2645e771681c7222de14af05b8289cbd0e3dd6a2hhj6_01816a41-d9a5-4b78-b5b5-553a33adb5d9/util/0.log" Dec 05 14:13:13 crc kubenswrapper[4784]: I1205 14:13:13.310037 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_f6f25c485af4fc88bfa2645e771681c7222de14af05b8289cbd0e3dd6a2hhj6_01816a41-d9a5-4b78-b5b5-553a33adb5d9/util/0.log" Dec 05 14:13:13 crc kubenswrapper[4784]: I1205 14:13:13.345884 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_f6f25c485af4fc88bfa2645e771681c7222de14af05b8289cbd0e3dd6a2hhj6_01816a41-d9a5-4b78-b5b5-553a33adb5d9/pull/0.log" Dec 05 14:13:13 crc kubenswrapper[4784]: I1205 14:13:13.360719 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_f6f25c485af4fc88bfa2645e771681c7222de14af05b8289cbd0e3dd6a2hhj6_01816a41-d9a5-4b78-b5b5-553a33adb5d9/pull/0.log" Dec 05 14:13:13 crc kubenswrapper[4784]: I1205 14:13:13.528383 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_f6f25c485af4fc88bfa2645e771681c7222de14af05b8289cbd0e3dd6a2hhj6_01816a41-d9a5-4b78-b5b5-553a33adb5d9/extract/0.log" Dec 05 14:13:13 crc kubenswrapper[4784]: I1205 14:13:13.534989 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_f6f25c485af4fc88bfa2645e771681c7222de14af05b8289cbd0e3dd6a2hhj6_01816a41-d9a5-4b78-b5b5-553a33adb5d9/pull/0.log" Dec 05 14:13:13 crc kubenswrapper[4784]: I1205 14:13:13.548056 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_f6f25c485af4fc88bfa2645e771681c7222de14af05b8289cbd0e3dd6a2hhj6_01816a41-d9a5-4b78-b5b5-553a33adb5d9/util/0.log" Dec 05 14:13:13 crc kubenswrapper[4784]: I1205 14:13:13.722298 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-tkgxb_bfa95d2c-7e0c-4a2e-8942-03eb8dfddbd5/kube-rbac-proxy/0.log" Dec 05 14:13:13 crc kubenswrapper[4784]: I1205 14:13:13.772897 4784 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-tkgxb_bfa95d2c-7e0c-4a2e-8942-03eb8dfddbd5/manager/0.log" Dec 05 14:13:13 crc kubenswrapper[4784]: I1205 14:13:13.782749 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-lw4zg_34392862-6b0a-4e19-8702-d685378817b1/kube-rbac-proxy/0.log" Dec 05 14:13:13 crc kubenswrapper[4784]: I1205 14:13:13.959372 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-bk9hd_d14c7f23-4235-4257-a178-6b90aa4cf3b4/kube-rbac-proxy/0.log" Dec 05 14:13:13 crc kubenswrapper[4784]: I1205 14:13:13.960584 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-lw4zg_34392862-6b0a-4e19-8702-d685378817b1/manager/0.log" Dec 05 14:13:14 crc kubenswrapper[4784]: I1205 14:13:14.040130 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-bk9hd_d14c7f23-4235-4257-a178-6b90aa4cf3b4/manager/0.log" Dec 05 14:13:14 crc kubenswrapper[4784]: I1205 14:13:14.159840 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-6n77l_6c20830c-fef0-4691-9505-5d0c3726ca11/kube-rbac-proxy/0.log" Dec 05 14:13:14 crc kubenswrapper[4784]: I1205 14:13:14.340629 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-fhllr_bd95d9d9-a3b2-4f91-94f1-a60041b5b640/kube-rbac-proxy/0.log" Dec 05 14:13:14 crc kubenswrapper[4784]: I1205 14:13:14.372977 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-6n77l_6c20830c-fef0-4691-9505-5d0c3726ca11/manager/0.log" Dec 05 14:13:14 crc kubenswrapper[4784]: I1205 14:13:14.432339 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-fhllr_bd95d9d9-a3b2-4f91-94f1-a60041b5b640/manager/0.log" Dec 05 14:13:14 crc kubenswrapper[4784]: I1205 14:13:14.533220 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-bstnz_253f2712-fbf0-476b-8ba3-387f7811e4f7/kube-rbac-proxy/0.log" Dec 05 14:13:14 crc kubenswrapper[4784]: I1205 14:13:14.608962 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-bstnz_253f2712-fbf0-476b-8ba3-387f7811e4f7/manager/0.log" Dec 05 14:13:14 crc kubenswrapper[4784]: I1205 14:13:14.655866 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-fkjcs_70f6568a-d588-4d71-8e38-def379ac95cf/kube-rbac-proxy/0.log" Dec 05 14:13:14 crc kubenswrapper[4784]: I1205 14:13:14.717131 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-fkjcs_70f6568a-d588-4d71-8e38-def379ac95cf/manager/0.log" Dec 05 14:13:15 crc kubenswrapper[4784]: I1205 14:13:15.030763 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-5z5pm_6200dbb3-7166-4fa0-925c-fe6155de2927/kube-rbac-proxy/0.log" Dec 05 14:13:15 crc kubenswrapper[4784]: I1205 14:13:15.068915 4784 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-5z5pm_6200dbb3-7166-4fa0-925c-fe6155de2927/manager/0.log" Dec 05 14:13:15 crc kubenswrapper[4784]: I1205 14:13:15.203935 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-n6sh9_d32fc3d7-6f1d-4f5c-8f70-39a417849b13/kube-rbac-proxy/0.log" Dec 05 14:13:15 crc kubenswrapper[4784]: I1205 14:13:15.302295 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-n6sh9_d32fc3d7-6f1d-4f5c-8f70-39a417849b13/manager/0.log" Dec 05 14:13:15 crc kubenswrapper[4784]: I1205 14:13:15.343083 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-prd2m_af5e1f7a-185c-402f-80b7-fb6c66084d0f/kube-rbac-proxy/0.log" Dec 05 14:13:15 crc kubenswrapper[4784]: I1205 14:13:15.457692 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-prd2m_af5e1f7a-185c-402f-80b7-fb6c66084d0f/manager/0.log" Dec 05 14:13:15 crc kubenswrapper[4784]: I1205 14:13:15.523757 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-llgbj_52133bea-24d3-440b-880d-67a3131c52db/kube-rbac-proxy/0.log" Dec 05 14:13:15 crc kubenswrapper[4784]: I1205 14:13:15.563732 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-llgbj_52133bea-24d3-440b-880d-67a3131c52db/manager/0.log" Dec 05 14:13:15 crc kubenswrapper[4784]: I1205 14:13:15.678867 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4646jk_d2899908-ecd6-4e04-932d-f26909c0f547/manager/0.log" Dec 05 14:13:15 crc kubenswrapper[4784]: I1205 14:13:15.706736 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4646jk_d2899908-ecd6-4e04-932d-f26909c0f547/kube-rbac-proxy/0.log" Dec 05 14:13:16 crc kubenswrapper[4784]: I1205 14:13:16.173869 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-64f95d469-ftlj7_96560d18-563e-4929-891e-4fb7c9a88619/operator/0.log" Dec 05 14:13:16 crc kubenswrapper[4784]: I1205 14:13:16.182484 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-mzzgx_40db50d1-6180-45f5-9774-7ed6b6dbf490/registry-server/0.log" Dec 05 14:13:16 crc kubenswrapper[4784]: I1205 14:13:16.388302 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-2jxh6_2e6a7f03-9a79-4c8f-8dfd-c1f4e6e3c2ad/kube-rbac-proxy/0.log" Dec 05 14:13:16 crc kubenswrapper[4784]: I1205 14:13:16.466053 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-2jxh6_2e6a7f03-9a79-4c8f-8dfd-c1f4e6e3c2ad/manager/0.log" Dec 05 14:13:16 crc kubenswrapper[4784]: I1205 14:13:16.614657 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-fs6fj_c31c0c1f-afa0-4ba8-a638-d27370864b63/kube-rbac-proxy/0.log" Dec 05 14:13:16 crc kubenswrapper[4784]: I1205 14:13:16.747132 4784 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-fs6fj_c31c0c1f-afa0-4ba8-a638-d27370864b63/manager/0.log" Dec 05 14:13:16 crc kubenswrapper[4784]: I1205 14:13:16.820079 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-mnptg_ce7167d9-e7f3-428e-bbcb-6879014ec908/operator/0.log" Dec 05 14:13:16 crc kubenswrapper[4784]: I1205 14:13:16.999013 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-2dt8t_d725fa2f-4d6e-47b2-aa2d-1757fcef4ad5/kube-rbac-proxy/0.log" Dec 05 14:13:17 crc kubenswrapper[4784]: I1205 14:13:17.070303 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-2dt8t_d725fa2f-4d6e-47b2-aa2d-1757fcef4ad5/manager/0.log" Dec 05 14:13:17 crc kubenswrapper[4784]: I1205 14:13:17.113451 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-tc6cr_43833100-b1fd-45fd-b772-9d0ee036c4ce/kube-rbac-proxy/0.log" Dec 05 14:13:17 crc kubenswrapper[4784]: I1205 14:13:17.195137 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-6f87cfd46c-qltwb_0a262894-4e21-4fe3-b216-b135bfb56d5b/manager/0.log" Dec 05 14:13:17 crc kubenswrapper[4784]: I1205 14:13:17.306679 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-j9wr9_2b3caeee-8e0e-4a20-9cea-f9f668e2a76f/kube-rbac-proxy/0.log" Dec 05 14:13:17 crc kubenswrapper[4784]: I1205 14:13:17.366331 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-j9wr9_2b3caeee-8e0e-4a20-9cea-f9f668e2a76f/manager/0.log" Dec 05 14:13:17 crc kubenswrapper[4784]: I1205 14:13:17.379048 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-tc6cr_43833100-b1fd-45fd-b772-9d0ee036c4ce/manager/0.log" Dec 05 14:13:17 crc kubenswrapper[4784]: I1205 14:13:17.510512 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-7b48476889-fdjfg_19094ce3-8926-4668-87b9-db8aac572e80/kube-rbac-proxy/0.log" Dec 05 14:13:17 crc kubenswrapper[4784]: I1205 14:13:17.592164 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-7b48476889-fdjfg_19094ce3-8926-4668-87b9-db8aac572e80/manager/0.log" Dec 05 14:13:22 crc kubenswrapper[4784]: I1205 14:13:22.999570 4784 scope.go:117] "RemoveContainer" containerID="30bb470db62f03f9ad108e27b3986b159c5260ee961be043b95823d3a75eb7f2" Dec 05 14:13:23 crc kubenswrapper[4784]: E1205 14:13:23.000302 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 14:13:34 crc kubenswrapper[4784]: I1205 14:13:34.594382 4784 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-shp28_4d076d7f-77aa-4e21-9189-80c39bc6147d/control-plane-machine-set-operator/0.log" Dec 05 14:13:34 crc kubenswrapper[4784]: I1205 14:13:34.770901 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-kjvqd_1b35d831-6af3-41e4-a111-ebfb9fefb029/machine-api-operator/0.log" Dec 05 14:13:34 crc kubenswrapper[4784]: I1205 14:13:34.810927 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-kjvqd_1b35d831-6af3-41e4-a111-ebfb9fefb029/kube-rbac-proxy/0.log" Dec 05 14:13:34 crc kubenswrapper[4784]: I1205 14:13:34.998821 4784 scope.go:117] "RemoveContainer" containerID="30bb470db62f03f9ad108e27b3986b159c5260ee961be043b95823d3a75eb7f2" Dec 05 14:13:34 crc kubenswrapper[4784]: E1205 14:13:34.999272 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 14:13:46 crc kubenswrapper[4784]: I1205 14:13:46.321575 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-5vsrc_7470ff9d-0206-41d0-b96c-b6618595be7a/cert-manager-controller/0.log" Dec 05 14:13:46 crc kubenswrapper[4784]: I1205 14:13:46.465064 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-6fms9_90561a13-c4ba-4973-9e21-c96cbea6a0b2/cert-manager-cainjector/0.log" Dec 05 14:13:46 crc kubenswrapper[4784]: I1205 14:13:46.519863 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-8brl6_4ae09fde-6000-4f2c-b9bf-ed200fcd83e5/cert-manager-webhook/0.log" Dec 05 14:13:50 crc kubenswrapper[4784]: I1205 14:13:49.999748 4784 scope.go:117] "RemoveContainer" containerID="30bb470db62f03f9ad108e27b3986b159c5260ee961be043b95823d3a75eb7f2" Dec 05 14:13:50 crc kubenswrapper[4784]: E1205 14:13:50.000433 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 14:13:57 crc kubenswrapper[4784]: I1205 14:13:57.979930 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-z58f9_c4e18387-6306-4f1c-8dd2-30cf9859dc6e/nmstate-console-plugin/0.log" Dec 05 14:13:58 crc kubenswrapper[4784]: I1205 14:13:58.134563 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-c9l7g_c005bc77-26a9-4402-abb3-8c16e17afb69/nmstate-handler/0.log" Dec 05 14:13:58 crc kubenswrapper[4784]: I1205 14:13:58.176156 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-96mtg_8a3d6fb3-6f9e-40b5-8de6-30f0588df3db/kube-rbac-proxy/0.log" Dec 05 14:13:58 crc kubenswrapper[4784]: I1205 14:13:58.207091 4784 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-96mtg_8a3d6fb3-6f9e-40b5-8de6-30f0588df3db/nmstate-metrics/0.log"
Dec 05 14:13:58 crc kubenswrapper[4784]: I1205 14:13:58.329973 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-5zl2c_200a7d52-d7cb-4b5e-91a8-d03a2f181b01/nmstate-operator/0.log"
Dec 05 14:13:58 crc kubenswrapper[4784]: I1205 14:13:58.383004 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-skw5g_081a21fa-325e-4018-9ce0-abc2bb1899ec/nmstate-webhook/0.log"
Dec 05 14:14:04 crc kubenswrapper[4784]: I1205 14:14:04.998722 4784 scope.go:117] "RemoveContainer" containerID="30bb470db62f03f9ad108e27b3986b159c5260ee961be043b95823d3a75eb7f2"
Dec 05 14:14:05 crc kubenswrapper[4784]: E1205 14:14:04.999620 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 14:14:12 crc kubenswrapper[4784]: I1205 14:14:12.258835 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-jgfs9_6bb837d7-0a54-4a2c-a943-70838b7b3d58/kube-rbac-proxy/0.log"
Dec 05 14:14:12 crc kubenswrapper[4784]: I1205 14:14:12.330618 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-jgfs9_6bb837d7-0a54-4a2c-a943-70838b7b3d58/controller/0.log"
Dec 05 14:14:12 crc kubenswrapper[4784]: I1205 14:14:12.439122 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-956ff_a65c75b7-3183-4839-a70e-d16e4776e89d/frr-k8s-webhook-server/0.log"
Dec 05 14:14:12 crc kubenswrapper[4784]: I1205 14:14:12.528242 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xjbhc_45594d33-f8da-48e8-b9c2-d60c96a98f64/cp-frr-files/0.log"
Dec 05 14:14:12 crc kubenswrapper[4784]: I1205 14:14:12.677786 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xjbhc_45594d33-f8da-48e8-b9c2-d60c96a98f64/cp-frr-files/0.log"
Dec 05 14:14:12 crc kubenswrapper[4784]: I1205 14:14:12.714800 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xjbhc_45594d33-f8da-48e8-b9c2-d60c96a98f64/cp-reloader/0.log"
Dec 05 14:14:12 crc kubenswrapper[4784]: I1205 14:14:12.720083 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xjbhc_45594d33-f8da-48e8-b9c2-d60c96a98f64/cp-reloader/0.log"
Dec 05 14:14:12 crc kubenswrapper[4784]: I1205 14:14:12.723109 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xjbhc_45594d33-f8da-48e8-b9c2-d60c96a98f64/cp-metrics/0.log"
Dec 05 14:14:12 crc kubenswrapper[4784]: I1205 14:14:12.876957 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xjbhc_45594d33-f8da-48e8-b9c2-d60c96a98f64/cp-metrics/0.log"
Dec 05 14:14:12 crc kubenswrapper[4784]: I1205 14:14:12.894927 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xjbhc_45594d33-f8da-48e8-b9c2-d60c96a98f64/cp-reloader/0.log"
Dec 05 14:14:12 crc kubenswrapper[4784]: I1205 14:14:12.895681 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xjbhc_45594d33-f8da-48e8-b9c2-d60c96a98f64/cp-frr-files/0.log"
Dec 05 14:14:12 crc kubenswrapper[4784]: I1205 14:14:12.905100 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xjbhc_45594d33-f8da-48e8-b9c2-d60c96a98f64/cp-metrics/0.log"
Dec 05 14:14:13 crc kubenswrapper[4784]: I1205 14:14:13.082391 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xjbhc_45594d33-f8da-48e8-b9c2-d60c96a98f64/cp-metrics/0.log"
Dec 05 14:14:13 crc kubenswrapper[4784]: I1205 14:14:13.085302 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xjbhc_45594d33-f8da-48e8-b9c2-d60c96a98f64/controller/0.log"
Dec 05 14:14:13 crc kubenswrapper[4784]: I1205 14:14:13.086768 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xjbhc_45594d33-f8da-48e8-b9c2-d60c96a98f64/cp-reloader/0.log"
Dec 05 14:14:13 crc kubenswrapper[4784]: I1205 14:14:13.101754 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xjbhc_45594d33-f8da-48e8-b9c2-d60c96a98f64/cp-frr-files/0.log"
Dec 05 14:14:13 crc kubenswrapper[4784]: I1205 14:14:13.290001 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xjbhc_45594d33-f8da-48e8-b9c2-d60c96a98f64/frr-metrics/0.log"
Dec 05 14:14:13 crc kubenswrapper[4784]: I1205 14:14:13.311850 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xjbhc_45594d33-f8da-48e8-b9c2-d60c96a98f64/kube-rbac-proxy-frr/0.log"
Dec 05 14:14:13 crc kubenswrapper[4784]: I1205 14:14:13.339201 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xjbhc_45594d33-f8da-48e8-b9c2-d60c96a98f64/kube-rbac-proxy/0.log"
Dec 05 14:14:13 crc kubenswrapper[4784]: I1205 14:14:13.539746 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-54655dd747-gqpqv_c5573d49-4a27-4dbb-ba09-0a6a3306e365/manager/0.log"
Dec 05 14:14:13 crc kubenswrapper[4784]: I1205 14:14:13.579541 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xjbhc_45594d33-f8da-48e8-b9c2-d60c96a98f64/reloader/0.log"
Dec 05 14:14:13 crc kubenswrapper[4784]: I1205 14:14:13.747301 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-849874cf66-slct4_27feea19-4a55-4d86-874a-60b62859a65c/webhook-server/0.log"
Dec 05 14:14:13 crc kubenswrapper[4784]: I1205 14:14:13.981313 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-hl6c5"]
Dec 05 14:14:13 crc kubenswrapper[4784]: E1205 14:14:13.981915 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26dba194-96b9-4bea-b94f-0b917a324310" containerName="container-00"
Dec 05 14:14:13 crc kubenswrapper[4784]: I1205 14:14:13.981938 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="26dba194-96b9-4bea-b94f-0b917a324310" containerName="container-00"
Dec 05 14:14:13 crc kubenswrapper[4784]: I1205 14:14:13.982243 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="26dba194-96b9-4bea-b94f-0b917a324310" containerName="container-00"
Dec 05 14:14:13 crc kubenswrapper[4784]: I1205 14:14:13.983844 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hl6c5"
Dec 05 14:14:14 crc kubenswrapper[4784]: I1205 14:14:14.020124 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hl6c5"]
Dec 05 14:14:14 crc kubenswrapper[4784]: I1205 14:14:14.042478 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-cffk7_f4d31217-a7d3-490f-8bba-c9d8ca4c47ca/kube-rbac-proxy/0.log"
Dec 05 14:14:14 crc kubenswrapper[4784]: I1205 14:14:14.063075 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ffeedb4-feb4-4322-8aab-537531306330-catalog-content\") pod \"redhat-marketplace-hl6c5\" (UID: \"9ffeedb4-feb4-4322-8aab-537531306330\") " pod="openshift-marketplace/redhat-marketplace-hl6c5"
Dec 05 14:14:14 crc kubenswrapper[4784]: I1205 14:14:14.063470 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8nmc\" (UniqueName: \"kubernetes.io/projected/9ffeedb4-feb4-4322-8aab-537531306330-kube-api-access-q8nmc\") pod \"redhat-marketplace-hl6c5\" (UID: \"9ffeedb4-feb4-4322-8aab-537531306330\") " pod="openshift-marketplace/redhat-marketplace-hl6c5"
Dec 05 14:14:14 crc kubenswrapper[4784]: I1205 14:14:14.064257 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ffeedb4-feb4-4322-8aab-537531306330-utilities\") pod \"redhat-marketplace-hl6c5\" (UID: \"9ffeedb4-feb4-4322-8aab-537531306330\") " pod="openshift-marketplace/redhat-marketplace-hl6c5"
Dec 05 14:14:14 crc kubenswrapper[4784]: I1205 14:14:14.165827 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ffeedb4-feb4-4322-8aab-537531306330-utilities\") pod \"redhat-marketplace-hl6c5\" (UID: \"9ffeedb4-feb4-4322-8aab-537531306330\") " pod="openshift-marketplace/redhat-marketplace-hl6c5"
Dec 05 14:14:14 crc kubenswrapper[4784]: I1205 14:14:14.165883 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ffeedb4-feb4-4322-8aab-537531306330-catalog-content\") pod \"redhat-marketplace-hl6c5\" (UID: \"9ffeedb4-feb4-4322-8aab-537531306330\") " pod="openshift-marketplace/redhat-marketplace-hl6c5"
Dec 05 14:14:14 crc kubenswrapper[4784]: I1205 14:14:14.165964 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8nmc\" (UniqueName: \"kubernetes.io/projected/9ffeedb4-feb4-4322-8aab-537531306330-kube-api-access-q8nmc\") pod \"redhat-marketplace-hl6c5\" (UID: \"9ffeedb4-feb4-4322-8aab-537531306330\") " pod="openshift-marketplace/redhat-marketplace-hl6c5"
Dec 05 14:14:14 crc kubenswrapper[4784]: I1205 14:14:14.166346 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ffeedb4-feb4-4322-8aab-537531306330-utilities\") pod \"redhat-marketplace-hl6c5\" (UID: \"9ffeedb4-feb4-4322-8aab-537531306330\") " pod="openshift-marketplace/redhat-marketplace-hl6c5"
Dec 05 14:14:14 crc kubenswrapper[4784]: I1205 14:14:14.166726 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ffeedb4-feb4-4322-8aab-537531306330-catalog-content\") pod \"redhat-marketplace-hl6c5\" (UID: \"9ffeedb4-feb4-4322-8aab-537531306330\") " pod="openshift-marketplace/redhat-marketplace-hl6c5"
Dec 05 14:14:14 crc kubenswrapper[4784]: I1205 14:14:14.206853 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8nmc\" (UniqueName: \"kubernetes.io/projected/9ffeedb4-feb4-4322-8aab-537531306330-kube-api-access-q8nmc\") pod \"redhat-marketplace-hl6c5\" (UID: \"9ffeedb4-feb4-4322-8aab-537531306330\") " pod="openshift-marketplace/redhat-marketplace-hl6c5"
Dec 05 14:14:14 crc kubenswrapper[4784]: I1205 14:14:14.320164 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hl6c5"
Dec 05 14:14:14 crc kubenswrapper[4784]: I1205 14:14:14.674542 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-cffk7_f4d31217-a7d3-490f-8bba-c9d8ca4c47ca/speaker/0.log"
Dec 05 14:14:14 crc kubenswrapper[4784]: I1205 14:14:14.802721 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hl6c5"]
Dec 05 14:14:14 crc kubenswrapper[4784]: I1205 14:14:14.886013 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hl6c5" event={"ID":"9ffeedb4-feb4-4322-8aab-537531306330","Type":"ContainerStarted","Data":"016101ca0966e6d2b8347307e7d6a53c409c907183962e4e9c1b8a05d3709328"}
Dec 05 14:14:15 crc kubenswrapper[4784]: I1205 14:14:15.254809 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xjbhc_45594d33-f8da-48e8-b9c2-d60c96a98f64/frr/0.log"
Dec 05 14:14:15 crc kubenswrapper[4784]: I1205 14:14:15.895371 4784 generic.go:334] "Generic (PLEG): container finished" podID="9ffeedb4-feb4-4322-8aab-537531306330" containerID="b08061a743a9681017fd5b652c97f858d97ee4935f6e847ada5e6039d8ea5daf" exitCode=0
Dec 05 14:14:15 crc kubenswrapper[4784]: I1205 14:14:15.895419 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hl6c5" event={"ID":"9ffeedb4-feb4-4322-8aab-537531306330","Type":"ContainerDied","Data":"b08061a743a9681017fd5b652c97f858d97ee4935f6e847ada5e6039d8ea5daf"}
Dec 05 14:14:15 crc kubenswrapper[4784]: I1205 14:14:15.897584 4784 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 05 14:14:16 crc kubenswrapper[4784]: I1205 14:14:16.909180 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hl6c5" event={"ID":"9ffeedb4-feb4-4322-8aab-537531306330","Type":"ContainerStarted","Data":"840cb4d92b640ef88f6e47fac545831bcfa65b07a124a73d9e491c1ef6f72f3a"}
Dec 05 14:14:17 crc kubenswrapper[4784]: I1205 14:14:17.921974 4784 generic.go:334] "Generic (PLEG): container finished" podID="9ffeedb4-feb4-4322-8aab-537531306330" containerID="840cb4d92b640ef88f6e47fac545831bcfa65b07a124a73d9e491c1ef6f72f3a" exitCode=0
Dec 05 14:14:17 crc kubenswrapper[4784]: I1205 14:14:17.922015 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hl6c5" event={"ID":"9ffeedb4-feb4-4322-8aab-537531306330","Type":"ContainerDied","Data":"840cb4d92b640ef88f6e47fac545831bcfa65b07a124a73d9e491c1ef6f72f3a"}
Dec 05 14:14:18 crc kubenswrapper[4784]: I1205 14:14:18.942632 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hl6c5" event={"ID":"9ffeedb4-feb4-4322-8aab-537531306330","Type":"ContainerStarted","Data":"984f3a4e7a87ca7041714b649b1b560ec0a435c9a6ece124c464134e4a09f992"}
Dec 05 14:14:18 crc kubenswrapper[4784]: I1205 14:14:18.975346 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-hl6c5" podStartSLOduration=3.333080426 podStartE2EDuration="5.975326882s" podCreationTimestamp="2025-12-05 14:14:13 +0000 UTC" firstStartedPulling="2025-12-05 14:14:15.897328766 +0000 UTC m=+6535.317395581" lastFinishedPulling="2025-12-05 14:14:18.539575222 +0000 UTC m=+6537.959642037" observedRunningTime="2025-12-05 14:14:18.963455493 +0000 UTC m=+6538.383522318" watchObservedRunningTime="2025-12-05 14:14:18.975326882 +0000 UTC m=+6538.395393697"
Dec 05 14:14:19 crc kubenswrapper[4784]: I1205 14:14:18.999437 4784 scope.go:117] "RemoveContainer" containerID="30bb470db62f03f9ad108e27b3986b159c5260ee961be043b95823d3a75eb7f2"
Dec 05 14:14:19 crc kubenswrapper[4784]: E1205 14:14:18.999688 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 14:14:24 crc kubenswrapper[4784]: I1205 14:14:24.321477 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-hl6c5"
Dec 05 14:14:24 crc kubenswrapper[4784]: I1205 14:14:24.324010 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-hl6c5"
Dec 05 14:14:24 crc kubenswrapper[4784]: I1205 14:14:24.369969 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-hl6c5"
Dec 05 14:14:25 crc kubenswrapper[4784]: I1205 14:14:25.050153 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-hl6c5"
Dec 05 14:14:25 crc kubenswrapper[4784]: I1205 14:14:25.109613 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-hl6c5"]
Dec 05 14:14:26 crc kubenswrapper[4784]: I1205 14:14:26.509916 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fr965w_1efb6a43-d637-4f19-8514-ee271c6aea44/util/0.log"
Dec 05 14:14:26 crc kubenswrapper[4784]: I1205 14:14:26.730390 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fr965w_1efb6a43-d637-4f19-8514-ee271c6aea44/pull/0.log"
Dec 05 14:14:26 crc kubenswrapper[4784]: I1205 14:14:26.738171 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fr965w_1efb6a43-d637-4f19-8514-ee271c6aea44/pull/0.log"
Dec 05 14:14:26 crc kubenswrapper[4784]: I1205 14:14:26.908278 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fr965w_1efb6a43-d637-4f19-8514-ee271c6aea44/extract/0.log"
Dec 05 14:14:26 crc kubenswrapper[4784]: I1205 14:14:26.927476 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fr965w_1efb6a43-d637-4f19-8514-ee271c6aea44/util/0.log"
Dec 05 14:14:26 crc kubenswrapper[4784]: I1205 14:14:26.941313 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fr965w_1efb6a43-d637-4f19-8514-ee271c6aea44/util/0.log"
Dec 05 14:14:26 crc kubenswrapper[4784]: I1205 14:14:26.941791 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fr965w_1efb6a43-d637-4f19-8514-ee271c6aea44/pull/0.log"
Dec 05 14:14:27 crc kubenswrapper[4784]: I1205 14:14:27.010328 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-hl6c5" podUID="9ffeedb4-feb4-4322-8aab-537531306330" containerName="registry-server" containerID="cri-o://984f3a4e7a87ca7041714b649b1b560ec0a435c9a6ece124c464134e4a09f992" gracePeriod=2
Dec 05 14:14:27 crc kubenswrapper[4784]: I1205 14:14:27.106475 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104qz4c_06a04def-1da6-41fc-9aa1-9a6d5a2dcafb/util/0.log"
Dec 05 14:14:27 crc kubenswrapper[4784]: I1205 14:14:27.274507 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104qz4c_06a04def-1da6-41fc-9aa1-9a6d5a2dcafb/util/0.log"
Dec 05 14:14:27 crc kubenswrapper[4784]: I1205 14:14:27.320921 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104qz4c_06a04def-1da6-41fc-9aa1-9a6d5a2dcafb/pull/0.log"
Dec 05 14:14:27 crc kubenswrapper[4784]: I1205 14:14:27.373939 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104qz4c_06a04def-1da6-41fc-9aa1-9a6d5a2dcafb/pull/0.log"
Dec 05 14:14:27 crc kubenswrapper[4784]: I1205 14:14:27.511997 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hl6c5"
Dec 05 14:14:27 crc kubenswrapper[4784]: I1205 14:14:27.533156 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q8nmc\" (UniqueName: \"kubernetes.io/projected/9ffeedb4-feb4-4322-8aab-537531306330-kube-api-access-q8nmc\") pod \"9ffeedb4-feb4-4322-8aab-537531306330\" (UID: \"9ffeedb4-feb4-4322-8aab-537531306330\") "
Dec 05 14:14:27 crc kubenswrapper[4784]: I1205 14:14:27.533470 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ffeedb4-feb4-4322-8aab-537531306330-utilities\") pod \"9ffeedb4-feb4-4322-8aab-537531306330\" (UID: \"9ffeedb4-feb4-4322-8aab-537531306330\") "
Dec 05 14:14:27 crc kubenswrapper[4784]: I1205 14:14:27.533556 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ffeedb4-feb4-4322-8aab-537531306330-catalog-content\") pod \"9ffeedb4-feb4-4322-8aab-537531306330\" (UID: \"9ffeedb4-feb4-4322-8aab-537531306330\") "
Dec 05 14:14:27 crc kubenswrapper[4784]: I1205 14:14:27.534371 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9ffeedb4-feb4-4322-8aab-537531306330-utilities" (OuterVolumeSpecName: "utilities") pod "9ffeedb4-feb4-4322-8aab-537531306330" (UID: "9ffeedb4-feb4-4322-8aab-537531306330"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 14:14:27 crc kubenswrapper[4784]: I1205 14:14:27.582034 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9ffeedb4-feb4-4322-8aab-537531306330-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9ffeedb4-feb4-4322-8aab-537531306330" (UID: "9ffeedb4-feb4-4322-8aab-537531306330"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 14:14:27 crc kubenswrapper[4784]: I1205 14:14:27.591590 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ffeedb4-feb4-4322-8aab-537531306330-kube-api-access-q8nmc" (OuterVolumeSpecName: "kube-api-access-q8nmc") pod "9ffeedb4-feb4-4322-8aab-537531306330" (UID: "9ffeedb4-feb4-4322-8aab-537531306330"). InnerVolumeSpecName "kube-api-access-q8nmc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 14:14:27 crc kubenswrapper[4784]: I1205 14:14:27.635017 4784 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ffeedb4-feb4-4322-8aab-537531306330-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 14:14:27 crc kubenswrapper[4784]: I1205 14:14:27.635049 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q8nmc\" (UniqueName: \"kubernetes.io/projected/9ffeedb4-feb4-4322-8aab-537531306330-kube-api-access-q8nmc\") on node \"crc\" DevicePath \"\""
Dec 05 14:14:27 crc kubenswrapper[4784]: I1205 14:14:27.635059 4784 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ffeedb4-feb4-4322-8aab-537531306330-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 14:14:27 crc kubenswrapper[4784]: I1205 14:14:27.640424 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104qz4c_06a04def-1da6-41fc-9aa1-9a6d5a2dcafb/extract/0.log"
Dec 05 14:14:27 crc kubenswrapper[4784]: I1205 14:14:27.683928 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104qz4c_06a04def-1da6-41fc-9aa1-9a6d5a2dcafb/pull/0.log"
Dec 05 14:14:27 crc kubenswrapper[4784]: I1205 14:14:27.690269 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104qz4c_06a04def-1da6-41fc-9aa1-9a6d5a2dcafb/util/0.log"
Dec 05 14:14:27 crc kubenswrapper[4784]: I1205 14:14:27.814040 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83mz79j_a3027214-9fa3-4dd0-93f5-b3b316247c73/util/0.log"
Dec 05 14:14:28 crc kubenswrapper[4784]: I1205 14:14:28.021867 4784 generic.go:334] "Generic (PLEG): container finished" podID="9ffeedb4-feb4-4322-8aab-537531306330" containerID="984f3a4e7a87ca7041714b649b1b560ec0a435c9a6ece124c464134e4a09f992" exitCode=0
Dec 05 14:14:28 crc kubenswrapper[4784]: I1205 14:14:28.021937 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hl6c5" event={"ID":"9ffeedb4-feb4-4322-8aab-537531306330","Type":"ContainerDied","Data":"984f3a4e7a87ca7041714b649b1b560ec0a435c9a6ece124c464134e4a09f992"}
Dec 05 14:14:28 crc kubenswrapper[4784]: I1205 14:14:28.021966 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hl6c5" event={"ID":"9ffeedb4-feb4-4322-8aab-537531306330","Type":"ContainerDied","Data":"016101ca0966e6d2b8347307e7d6a53c409c907183962e4e9c1b8a05d3709328"}
Dec 05 14:14:28 crc kubenswrapper[4784]: I1205 14:14:28.021985 4784 scope.go:117] "RemoveContainer" containerID="984f3a4e7a87ca7041714b649b1b560ec0a435c9a6ece124c464134e4a09f992"
Dec 05 14:14:28 crc kubenswrapper[4784]: I1205 14:14:28.022012 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hl6c5"
Dec 05 14:14:28 crc kubenswrapper[4784]: I1205 14:14:28.054751 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83mz79j_a3027214-9fa3-4dd0-93f5-b3b316247c73/pull/0.log"
Dec 05 14:14:28 crc kubenswrapper[4784]: I1205 14:14:28.069208 4784 scope.go:117] "RemoveContainer" containerID="840cb4d92b640ef88f6e47fac545831bcfa65b07a124a73d9e491c1ef6f72f3a"
Dec 05 14:14:28 crc kubenswrapper[4784]: I1205 14:14:28.084294 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-hl6c5"]
Dec 05 14:14:28 crc kubenswrapper[4784]: I1205 14:14:28.099433 4784 scope.go:117] "RemoveContainer" containerID="b08061a743a9681017fd5b652c97f858d97ee4935f6e847ada5e6039d8ea5daf"
Dec 05 14:14:28 crc kubenswrapper[4784]: I1205 14:14:28.105002 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83mz79j_a3027214-9fa3-4dd0-93f5-b3b316247c73/util/0.log"
Dec 05 14:14:28 crc kubenswrapper[4784]: I1205 14:14:28.108736 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-hl6c5"]
Dec 05 14:14:28 crc kubenswrapper[4784]: I1205 14:14:28.110275 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83mz79j_a3027214-9fa3-4dd0-93f5-b3b316247c73/pull/0.log"
Dec 05 14:14:28 crc kubenswrapper[4784]: I1205 14:14:28.156262 4784 scope.go:117] "RemoveContainer" containerID="984f3a4e7a87ca7041714b649b1b560ec0a435c9a6ece124c464134e4a09f992"
Dec 05 14:14:28 crc kubenswrapper[4784]: E1205 14:14:28.156761 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"984f3a4e7a87ca7041714b649b1b560ec0a435c9a6ece124c464134e4a09f992\": container with ID starting with 984f3a4e7a87ca7041714b649b1b560ec0a435c9a6ece124c464134e4a09f992 not found: ID does not exist" containerID="984f3a4e7a87ca7041714b649b1b560ec0a435c9a6ece124c464134e4a09f992"
Dec 05 14:14:28 crc kubenswrapper[4784]: I1205 14:14:28.156794 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"984f3a4e7a87ca7041714b649b1b560ec0a435c9a6ece124c464134e4a09f992"} err="failed to get container status \"984f3a4e7a87ca7041714b649b1b560ec0a435c9a6ece124c464134e4a09f992\": rpc error: code = NotFound desc = could not find container \"984f3a4e7a87ca7041714b649b1b560ec0a435c9a6ece124c464134e4a09f992\": container with ID starting with 984f3a4e7a87ca7041714b649b1b560ec0a435c9a6ece124c464134e4a09f992 not found: ID does not exist"
Dec 05 14:14:28 crc kubenswrapper[4784]: I1205 14:14:28.156817 4784 scope.go:117] "RemoveContainer" containerID="840cb4d92b640ef88f6e47fac545831bcfa65b07a124a73d9e491c1ef6f72f3a"
Dec 05 14:14:28 crc kubenswrapper[4784]: E1205 14:14:28.156973 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"840cb4d92b640ef88f6e47fac545831bcfa65b07a124a73d9e491c1ef6f72f3a\": container with ID starting with 840cb4d92b640ef88f6e47fac545831bcfa65b07a124a73d9e491c1ef6f72f3a not found: ID does not exist" containerID="840cb4d92b640ef88f6e47fac545831bcfa65b07a124a73d9e491c1ef6f72f3a"
Dec 05 14:14:28 crc kubenswrapper[4784]: I1205 14:14:28.156993 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"840cb4d92b640ef88f6e47fac545831bcfa65b07a124a73d9e491c1ef6f72f3a"} err="failed to get container status \"840cb4d92b640ef88f6e47fac545831bcfa65b07a124a73d9e491c1ef6f72f3a\": rpc error: code = NotFound desc = could not find container \"840cb4d92b640ef88f6e47fac545831bcfa65b07a124a73d9e491c1ef6f72f3a\": container with ID starting with 840cb4d92b640ef88f6e47fac545831bcfa65b07a124a73d9e491c1ef6f72f3a not found: ID does not exist"
Dec 05 14:14:28 crc kubenswrapper[4784]: I1205 14:14:28.157005 4784 scope.go:117] "RemoveContainer" containerID="b08061a743a9681017fd5b652c97f858d97ee4935f6e847ada5e6039d8ea5daf"
Dec 05 14:14:28 crc kubenswrapper[4784]: E1205 14:14:28.157140 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b08061a743a9681017fd5b652c97f858d97ee4935f6e847ada5e6039d8ea5daf\": container with ID starting with b08061a743a9681017fd5b652c97f858d97ee4935f6e847ada5e6039d8ea5daf not found: ID does not exist" containerID="b08061a743a9681017fd5b652c97f858d97ee4935f6e847ada5e6039d8ea5daf"
Dec 05 14:14:28 crc kubenswrapper[4784]: I1205 14:14:28.157157 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b08061a743a9681017fd5b652c97f858d97ee4935f6e847ada5e6039d8ea5daf"} err="failed to get container status \"b08061a743a9681017fd5b652c97f858d97ee4935f6e847ada5e6039d8ea5daf\": rpc error: code = NotFound desc = could not find container \"b08061a743a9681017fd5b652c97f858d97ee4935f6e847ada5e6039d8ea5daf\": container with ID starting with b08061a743a9681017fd5b652c97f858d97ee4935f6e847ada5e6039d8ea5daf not found: ID does not exist"
Dec 05 14:14:28 crc kubenswrapper[4784]: I1205 14:14:28.258600 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83mz79j_a3027214-9fa3-4dd0-93f5-b3b316247c73/pull/0.log"
Dec 05 14:14:28 crc kubenswrapper[4784]: I1205 14:14:28.259472 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83mz79j_a3027214-9fa3-4dd0-93f5-b3b316247c73/util/0.log"
Dec 05 14:14:28 crc kubenswrapper[4784]: I1205 14:14:28.305675 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83mz79j_a3027214-9fa3-4dd0-93f5-b3b316247c73/extract/0.log"
Dec 05 14:14:28 crc kubenswrapper[4784]: I1205 14:14:28.446362 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pbmwm_7ab4604a-3a19-4d0b-b6a0-b8d7274df317/extract-utilities/0.log"
Dec 05 14:14:28 crc kubenswrapper[4784]: I1205 14:14:28.596327 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pbmwm_7ab4604a-3a19-4d0b-b6a0-b8d7274df317/extract-utilities/0.log"
Dec 05 14:14:28 crc kubenswrapper[4784]: I1205 14:14:28.644285 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pbmwm_7ab4604a-3a19-4d0b-b6a0-b8d7274df317/extract-content/0.log"
Dec 05 14:14:28 crc kubenswrapper[4784]: I1205 14:14:28.655257 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pbmwm_7ab4604a-3a19-4d0b-b6a0-b8d7274df317/extract-content/0.log"
Dec 05 14:14:28 crc kubenswrapper[4784]: I1205 14:14:28.781442 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pbmwm_7ab4604a-3a19-4d0b-b6a0-b8d7274df317/extract-utilities/0.log"
Dec 05 14:14:28 crc kubenswrapper[4784]: I1205 14:14:28.816753 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pbmwm_7ab4604a-3a19-4d0b-b6a0-b8d7274df317/extract-content/0.log"
Dec 05 14:14:28 crc kubenswrapper[4784]: I1205 14:14:28.991671 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6trnd_55e564fa-612a-4e0b-bc29-09e5384fe16c/extract-utilities/0.log"
Dec 05 14:14:29 crc kubenswrapper[4784]: I1205 14:14:29.019446 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9ffeedb4-feb4-4322-8aab-537531306330" path="/var/lib/kubelet/pods/9ffeedb4-feb4-4322-8aab-537531306330/volumes"
Dec 05 14:14:29 crc kubenswrapper[4784]: I1205 14:14:29.227928 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6trnd_55e564fa-612a-4e0b-bc29-09e5384fe16c/extract-content/0.log"
Dec 05 14:14:29 crc kubenswrapper[4784]: I1205 14:14:29.272666 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6trnd_55e564fa-612a-4e0b-bc29-09e5384fe16c/extract-utilities/0.log"
Dec 05 14:14:29 crc kubenswrapper[4784]: I1205 14:14:29.357537 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6trnd_55e564fa-612a-4e0b-bc29-09e5384fe16c/extract-content/0.log"
Dec 05 14:14:29 crc kubenswrapper[4784]: I1205 14:14:29.557405 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6trnd_55e564fa-612a-4e0b-bc29-09e5384fe16c/extract-utilities/0.log"
Dec 05 14:14:29 crc kubenswrapper[4784]: I1205 14:14:29.667968 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6trnd_55e564fa-612a-4e0b-bc29-09e5384fe16c/extract-content/0.log"
Dec 05 14:14:29 crc kubenswrapper[4784]: I1205 14:14:29.701977 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pbmwm_7ab4604a-3a19-4d0b-b6a0-b8d7274df317/registry-server/0.log"
Dec 05 14:14:29 crc kubenswrapper[4784]: I1205 14:14:29.915454 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-kvfdl_4825dcfb-cf17-4a0d-b4f2-4f46c87beccb/marketplace-operator/1.log"
Dec 05 14:14:29 crc kubenswrapper[4784]: I1205 14:14:29.999220 4784 scope.go:117] "RemoveContainer" containerID="30bb470db62f03f9ad108e27b3986b159c5260ee961be043b95823d3a75eb7f2"
Dec 05 14:14:29 crc kubenswrapper[4784]: E1205 14:14:29.999491 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 14:14:30 crc kubenswrapper[4784]: I1205 14:14:30.074508 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-kvfdl_4825dcfb-cf17-4a0d-b4f2-4f46c87beccb/marketplace-operator/2.log"
Dec 05 14:14:30 crc kubenswrapper[4784]: I1205 14:14:30.228343 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-hg45x_24a95ae6-0c84-4572-bfc4-5acb5295577c/extract-utilities/0.log"
Dec 05 14:14:30 crc kubenswrapper[4784]: I1205 14:14:30.340855 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6trnd_55e564fa-612a-4e0b-bc29-09e5384fe16c/registry-server/0.log"
Dec 05 14:14:30 crc kubenswrapper[4784]: I1205 14:14:30.447250 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-hg45x_24a95ae6-0c84-4572-bfc4-5acb5295577c/extract-content/0.log"
Dec 05 14:14:30 crc kubenswrapper[4784]: I1205 14:14:30.449119 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-hg45x_24a95ae6-0c84-4572-bfc4-5acb5295577c/extract-utilities/0.log"
Dec 05 14:14:30 crc kubenswrapper[4784]: I1205 14:14:30.480141 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-hg45x_24a95ae6-0c84-4572-bfc4-5acb5295577c/extract-content/0.log"
Dec 05 14:14:30 crc kubenswrapper[4784]: I1205 14:14:30.636695 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-hg45x_24a95ae6-0c84-4572-bfc4-5acb5295577c/extract-utilities/0.log"
Dec 05 14:14:30 crc kubenswrapper[4784]: I1205 14:14:30.646486 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-hg45x_24a95ae6-0c84-4572-bfc4-5acb5295577c/extract-content/0.log"
Dec 05 14:14:30 crc kubenswrapper[4784]: I1205 14:14:30.855446 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-hg45x_24a95ae6-0c84-4572-bfc4-5acb5295577c/registry-server/0.log"
Dec 05 14:14:31 crc kubenswrapper[4784]: I1205 14:14:31.093396 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-xlw8f_564caa58-786b-44bd-96a5-963c2e8343f7/extract-utilities/0.log"
Dec 05 14:14:31 crc kubenswrapper[4784]: I1205 14:14:31.275791 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-xlw8f_564caa58-786b-44bd-96a5-963c2e8343f7/extract-utilities/0.log"
Dec 05 14:14:31 crc kubenswrapper[4784]: I1205 14:14:31.298086 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-xlw8f_564caa58-786b-44bd-96a5-963c2e8343f7/extract-content/0.log"
Dec 05 14:14:31 crc kubenswrapper[4784]: I1205 14:14:31.306986 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-xlw8f_564caa58-786b-44bd-96a5-963c2e8343f7/extract-content/0.log"
Dec 05 14:14:31 crc kubenswrapper[4784]: I1205 14:14:31.429101 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-xlw8f_564caa58-786b-44bd-96a5-963c2e8343f7/extract-utilities/0.log"
Dec 05 14:14:31 crc kubenswrapper[4784]: I1205 14:14:31.455888 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-xlw8f_564caa58-786b-44bd-96a5-963c2e8343f7/extract-content/0.log"
Dec 05 14:14:32 crc kubenswrapper[4784]: I1205 14:14:32.173098 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-xlw8f_564caa58-786b-44bd-96a5-963c2e8343f7/registry-server/0.log"
Dec 05 14:14:41 crc kubenswrapper[4784]: I1205 14:14:41.006499 4784 scope.go:117] "RemoveContainer" containerID="30bb470db62f03f9ad108e27b3986b159c5260ee961be043b95823d3a75eb7f2"
Dec 05 14:14:41 crc kubenswrapper[4784]: E1205 14:14:41.008543 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 14:14:43 crc kubenswrapper[4784]: I1205 14:14:43.305622 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-668cf9dfbb-q5lxp_954cb856-d909-4541-89c7-7c38bf8d8618/prometheus-operator/0.log"
Dec 05 14:14:43 crc kubenswrapper[4784]: I1205 14:14:43.432100 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-848d96db67-6k7p8_573cb676-d704-4d0d-852c-582d38a64cdb/prometheus-operator-admission-webhook/0.log"
Dec 05 14:14:43 crc kubenswrapper[4784]: I1205 14:14:43.477897 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-848d96db67-869qk_e6ba5df2-9910-453c-9993-fca6642b4e8e/prometheus-operator-admission-webhook/0.log"
Dec 05 14:14:43 crc kubenswrapper[4784]: I1205 14:14:43.619404 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-d8bb48f5d-m29qp_bdf43736-9e51-4d7d-8290-075b7f058f62/operator/0.log"
Dec 05 14:14:43 crc kubenswrapper[4784]: I1205 14:14:43.688144 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5446b9c989-6j8bm_a56fb787-b445-49b8-a50c-5cddf822fc68/perses-operator/0.log"
Dec 05 14:14:54 crc kubenswrapper[4784]: I1205 14:14:54.999362 4784 scope.go:117] "RemoveContainer" containerID="30bb470db62f03f9ad108e27b3986b159c5260ee961be043b95823d3a75eb7f2"
Dec 05 14:14:55 crc kubenswrapper[4784]: E1205 14:14:55.001148 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 14:15:00 crc kubenswrapper[4784]: I1205 14:15:00.162948 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415735-2d6p8"]
Dec 05 14:15:00 crc kubenswrapper[4784]: E1205 14:15:00.164450 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ffeedb4-feb4-4322-8aab-537531306330" containerName="extract-content"
Dec 05 14:15:00 crc kubenswrapper[4784]: I1205 14:15:00.164505 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ffeedb4-feb4-4322-8aab-537531306330" containerName="extract-content"
Dec 05 14:15:00 crc kubenswrapper[4784]: E1205 14:15:00.164527 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ffeedb4-feb4-4322-8aab-537531306330" containerName="registry-server"
Dec 05 14:15:00 crc kubenswrapper[4784]: I1205 14:15:00.164534 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ffeedb4-feb4-4322-8aab-537531306330" containerName="registry-server"
Dec 05 14:15:00 crc kubenswrapper[4784]: E1205 14:15:00.164561 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ffeedb4-feb4-4322-8aab-537531306330" containerName="extract-utilities"
Dec 05 14:15:00 crc kubenswrapper[4784]: I1205 14:15:00.164571 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ffeedb4-feb4-4322-8aab-537531306330" containerName="extract-utilities"
Dec 05 14:15:00 crc kubenswrapper[4784]: I1205 14:15:00.164784 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ffeedb4-feb4-4322-8aab-537531306330" containerName="registry-server"
Dec 05 14:15:00 crc kubenswrapper[4784]: I1205 14:15:00.165481 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415735-2d6p8"
Dec 05 14:15:00 crc kubenswrapper[4784]: I1205 14:15:00.167810 4784 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 05 14:15:00 crc kubenswrapper[4784]: I1205 14:15:00.167947 4784 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 05 14:15:00 crc kubenswrapper[4784]: I1205 14:15:00.176324 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415735-2d6p8"]
Dec 05 14:15:00 crc kubenswrapper[4784]: I1205 14:15:00.221439 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/338f6f98-0f02-4c15-92b8-38fd842df297-secret-volume\") pod \"collect-profiles-29415735-2d6p8\" (UID: \"338f6f98-0f02-4c15-92b8-38fd842df297\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415735-2d6p8"
Dec 05 14:15:00 crc kubenswrapper[4784]: I1205 14:15:00.221500 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5fk6x\" (UniqueName: \"kubernetes.io/projected/338f6f98-0f02-4c15-92b8-38fd842df297-kube-api-access-5fk6x\") pod \"collect-profiles-29415735-2d6p8\" (UID: \"338f6f98-0f02-4c15-92b8-38fd842df297\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415735-2d6p8"
Dec 05 14:15:00 crc kubenswrapper[4784]: I1205 14:15:00.221574 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/338f6f98-0f02-4c15-92b8-38fd842df297-config-volume\") pod \"collect-profiles-29415735-2d6p8\" (UID: \"338f6f98-0f02-4c15-92b8-38fd842df297\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415735-2d6p8"
Dec 05 14:15:00 crc kubenswrapper[4784]: I1205 14:15:00.323974 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/338f6f98-0f02-4c15-92b8-38fd842df297-secret-volume\") pod \"collect-profiles-29415735-2d6p8\" (UID: \"338f6f98-0f02-4c15-92b8-38fd842df297\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415735-2d6p8"
Dec 05 14:15:00 crc kubenswrapper[4784]: I1205 14:15:00.324045 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5fk6x\" (UniqueName: \"kubernetes.io/projected/338f6f98-0f02-4c15-92b8-38fd842df297-kube-api-access-5fk6x\") pod \"collect-profiles-29415735-2d6p8\" (UID: \"338f6f98-0f02-4c15-92b8-38fd842df297\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415735-2d6p8"
Dec 05 14:15:00 crc kubenswrapper[4784]: I1205 14:15:00.324105 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/338f6f98-0f02-4c15-92b8-38fd842df297-config-volume\") pod \"collect-profiles-29415735-2d6p8\" (UID: \"338f6f98-0f02-4c15-92b8-38fd842df297\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415735-2d6p8"
Dec 05 14:15:00 crc kubenswrapper[4784]: I1205 14:15:00.325221 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/338f6f98-0f02-4c15-92b8-38fd842df297-config-volume\") pod \"collect-profiles-29415735-2d6p8\" (UID: \"338f6f98-0f02-4c15-92b8-38fd842df297\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415735-2d6p8"
Dec 05 14:15:00 crc kubenswrapper[4784]: I1205 14:15:00.338743 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/338f6f98-0f02-4c15-92b8-38fd842df297-secret-volume\") pod \"collect-profiles-29415735-2d6p8\" (UID: \"338f6f98-0f02-4c15-92b8-38fd842df297\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415735-2d6p8"
Dec 05 14:15:00 crc kubenswrapper[4784]: I1205 14:15:00.343780 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5fk6x\" (UniqueName: \"kubernetes.io/projected/338f6f98-0f02-4c15-92b8-38fd842df297-kube-api-access-5fk6x\") pod \"collect-profiles-29415735-2d6p8\" (UID: \"338f6f98-0f02-4c15-92b8-38fd842df297\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415735-2d6p8"
Dec 05 14:15:00 crc kubenswrapper[4784]: I1205 14:15:00.531721 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415735-2d6p8"
Dec 05 14:15:01 crc kubenswrapper[4784]: I1205 14:15:01.085733 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415735-2d6p8"]
Dec 05 14:15:01 crc kubenswrapper[4784]: I1205 14:15:01.369683 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415735-2d6p8" event={"ID":"338f6f98-0f02-4c15-92b8-38fd842df297","Type":"ContainerStarted","Data":"dd3563ed1c5e17a11d0f68a8b91581ec1de7476e9b24c888d415cac847726609"}
Dec 05 14:15:01 crc kubenswrapper[4784]: I1205 14:15:01.369789 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415735-2d6p8" event={"ID":"338f6f98-0f02-4c15-92b8-38fd842df297","Type":"ContainerStarted","Data":"dfb902b4d9d0420557098dfdddc5b39f636c778b7093c338e6a297730d573604"}
Dec 05 14:15:01 crc kubenswrapper[4784]: I1205 14:15:01.386752 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29415735-2d6p8" podStartSLOduration=1.386731251 podStartE2EDuration="1.386731251s" podCreationTimestamp="2025-12-05 14:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 14:15:01.384903665 +0000 UTC m=+6580.804970480" watchObservedRunningTime="2025-12-05 14:15:01.386731251 +0000 UTC m=+6580.806798066"
Dec 05 14:15:02 crc kubenswrapper[4784]: I1205 14:15:02.380916 4784 generic.go:334] "Generic (PLEG): container finished" podID="338f6f98-0f02-4c15-92b8-38fd842df297" containerID="dd3563ed1c5e17a11d0f68a8b91581ec1de7476e9b24c888d415cac847726609" exitCode=0
Dec 05 14:15:02 crc kubenswrapper[4784]: I1205 14:15:02.381007 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415735-2d6p8" event={"ID":"338f6f98-0f02-4c15-92b8-38fd842df297","Type":"ContainerDied","Data":"dd3563ed1c5e17a11d0f68a8b91581ec1de7476e9b24c888d415cac847726609"}
Dec 05 14:15:03 crc kubenswrapper[4784]: I1205 14:15:03.812199 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415735-2d6p8"
Dec 05 14:15:03 crc kubenswrapper[4784]: I1205 14:15:03.919995 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/338f6f98-0f02-4c15-92b8-38fd842df297-secret-volume\") pod \"338f6f98-0f02-4c15-92b8-38fd842df297\" (UID: \"338f6f98-0f02-4c15-92b8-38fd842df297\") "
Dec 05 14:15:03 crc kubenswrapper[4784]: I1205 14:15:03.920145 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5fk6x\" (UniqueName: \"kubernetes.io/projected/338f6f98-0f02-4c15-92b8-38fd842df297-kube-api-access-5fk6x\") pod \"338f6f98-0f02-4c15-92b8-38fd842df297\" (UID: \"338f6f98-0f02-4c15-92b8-38fd842df297\") "
Dec 05 14:15:03 crc kubenswrapper[4784]: I1205 14:15:03.920378 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/338f6f98-0f02-4c15-92b8-38fd842df297-config-volume\") pod \"338f6f98-0f02-4c15-92b8-38fd842df297\" (UID: \"338f6f98-0f02-4c15-92b8-38fd842df297\") "
Dec 05 14:15:03 crc kubenswrapper[4784]: I1205 14:15:03.921147 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/338f6f98-0f02-4c15-92b8-38fd842df297-config-volume" (OuterVolumeSpecName: "config-volume") pod "338f6f98-0f02-4c15-92b8-38fd842df297" (UID: "338f6f98-0f02-4c15-92b8-38fd842df297"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 14:15:03 crc kubenswrapper[4784]: I1205 14:15:03.927455 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/338f6f98-0f02-4c15-92b8-38fd842df297-kube-api-access-5fk6x" (OuterVolumeSpecName: "kube-api-access-5fk6x") pod "338f6f98-0f02-4c15-92b8-38fd842df297" (UID: "338f6f98-0f02-4c15-92b8-38fd842df297"). InnerVolumeSpecName "kube-api-access-5fk6x". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 14:15:03 crc kubenswrapper[4784]: I1205 14:15:03.937781 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/338f6f98-0f02-4c15-92b8-38fd842df297-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "338f6f98-0f02-4c15-92b8-38fd842df297" (UID: "338f6f98-0f02-4c15-92b8-38fd842df297"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 14:15:04 crc kubenswrapper[4784]: I1205 14:15:04.022453 4784 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/338f6f98-0f02-4c15-92b8-38fd842df297-config-volume\") on node \"crc\" DevicePath \"\""
Dec 05 14:15:04 crc kubenswrapper[4784]: I1205 14:15:04.022486 4784 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/338f6f98-0f02-4c15-92b8-38fd842df297-secret-volume\") on node \"crc\" DevicePath \"\""
Dec 05 14:15:04 crc kubenswrapper[4784]: I1205 14:15:04.022497 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5fk6x\" (UniqueName: \"kubernetes.io/projected/338f6f98-0f02-4c15-92b8-38fd842df297-kube-api-access-5fk6x\") on node \"crc\" DevicePath \"\""
Dec 05 14:15:04 crc kubenswrapper[4784]: I1205 14:15:04.404531 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415735-2d6p8" event={"ID":"338f6f98-0f02-4c15-92b8-38fd842df297","Type":"ContainerDied","Data":"dfb902b4d9d0420557098dfdddc5b39f636c778b7093c338e6a297730d573604"}
Dec 05 14:15:04 crc kubenswrapper[4784]: I1205 14:15:04.404922 4784 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dfb902b4d9d0420557098dfdddc5b39f636c778b7093c338e6a297730d573604"
Dec 05 14:15:04 crc kubenswrapper[4784]: I1205 14:15:04.404592 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415735-2d6p8"
Dec 05 14:15:04 crc kubenswrapper[4784]: I1205 14:15:04.468543 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415690-6d4tl"]
Dec 05 14:15:04 crc kubenswrapper[4784]: I1205 14:15:04.481559 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415690-6d4tl"]
Dec 05 14:15:05 crc kubenswrapper[4784]: I1205 14:15:05.012742 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66ab9e30-b279-4ffd-a2df-380f69151467" path="/var/lib/kubelet/pods/66ab9e30-b279-4ffd-a2df-380f69151467/volumes"
Dec 05 14:15:06 crc kubenswrapper[4784]: I1205 14:15:06.999248 4784 scope.go:117] "RemoveContainer" containerID="30bb470db62f03f9ad108e27b3986b159c5260ee961be043b95823d3a75eb7f2"
Dec 05 14:15:07 crc kubenswrapper[4784]: E1205 14:15:07.000010 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 14:15:19 crc kubenswrapper[4784]: I1205 14:15:18.999495 4784 scope.go:117] "RemoveContainer" containerID="30bb470db62f03f9ad108e27b3986b159c5260ee961be043b95823d3a75eb7f2"
Dec 05 14:15:19 crc kubenswrapper[4784]: E1205 14:15:19.000875 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 14:15:31 crc kubenswrapper[4784]: I1205 14:15:31.010494 4784 scope.go:117] "RemoveContainer" containerID="30bb470db62f03f9ad108e27b3986b159c5260ee961be043b95823d3a75eb7f2"
Dec 05 14:15:31 crc kubenswrapper[4784]: E1205 14:15:31.011305 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 14:15:41 crc kubenswrapper[4784]: I1205 14:15:41.376091 4784 scope.go:117] "RemoveContainer" containerID="56b4d1a2c2a3b3c1e79a15d2cc3cfdfba6af1297c540984e1575269e209520b3"
Dec 05 14:15:42 crc kubenswrapper[4784]: I1205 14:15:42.000060 4784 scope.go:117] "RemoveContainer" containerID="30bb470db62f03f9ad108e27b3986b159c5260ee961be043b95823d3a75eb7f2"
Dec 05 14:15:42 crc kubenswrapper[4784]: E1205 14:15:42.000375 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 14:15:53 crc kubenswrapper[4784]: I1205 14:15:53.998759 4784 scope.go:117] "RemoveContainer" containerID="30bb470db62f03f9ad108e27b3986b159c5260ee961be043b95823d3a75eb7f2"
Dec 05 14:15:54 crc kubenswrapper[4784]: E1205 14:15:53.999680 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 14:16:05 crc kubenswrapper[4784]: I1205 14:16:05.999218 4784 scope.go:117] "RemoveContainer" containerID="30bb470db62f03f9ad108e27b3986b159c5260ee961be043b95823d3a75eb7f2"
Dec 05 14:16:06 crc kubenswrapper[4784]: E1205 14:16:05.999754 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08"
Dec 05 14:16:10 crc kubenswrapper[4784]: I1205 14:16:10.312391 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-qg74c"]
Dec 05 14:16:10 crc kubenswrapper[4784]: E1205 14:16:10.315023 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="338f6f98-0f02-4c15-92b8-38fd842df297" containerName="collect-profiles"
Dec 05 14:16:10 crc kubenswrapper[4784]: I1205 14:16:10.315074 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="338f6f98-0f02-4c15-92b8-38fd842df297" containerName="collect-profiles"
Dec 05 14:16:10 crc kubenswrapper[4784]: I1205 14:16:10.315608 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="338f6f98-0f02-4c15-92b8-38fd842df297" containerName="collect-profiles"
Dec 05 14:16:10 crc kubenswrapper[4784]: I1205 14:16:10.321473 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qg74c"
Dec 05 14:16:10 crc kubenswrapper[4784]: I1205 14:16:10.326289 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qg74c"]
Dec 05 14:16:10 crc kubenswrapper[4784]: I1205 14:16:10.438453 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-469ww\" (UniqueName: \"kubernetes.io/projected/2e84671d-c612-4ea6-b318-4ff47f8532f0-kube-api-access-469ww\") pod \"certified-operators-qg74c\" (UID: \"2e84671d-c612-4ea6-b318-4ff47f8532f0\") " pod="openshift-marketplace/certified-operators-qg74c"
Dec 05 14:16:10 crc kubenswrapper[4784]: I1205 14:16:10.438547 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e84671d-c612-4ea6-b318-4ff47f8532f0-utilities\") pod \"certified-operators-qg74c\" (UID: \"2e84671d-c612-4ea6-b318-4ff47f8532f0\") " pod="openshift-marketplace/certified-operators-qg74c"
Dec 05 14:16:10 crc kubenswrapper[4784]: I1205 14:16:10.438625 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e84671d-c612-4ea6-b318-4ff47f8532f0-catalog-content\") pod \"certified-operators-qg74c\" (UID: \"2e84671d-c612-4ea6-b318-4ff47f8532f0\") " pod="openshift-marketplace/certified-operators-qg74c"
Dec 05 14:16:10 crc kubenswrapper[4784]: I1205 14:16:10.540544 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-469ww\" (UniqueName: \"kubernetes.io/projected/2e84671d-c612-4ea6-b318-4ff47f8532f0-kube-api-access-469ww\") pod \"certified-operators-qg74c\" (UID: \"2e84671d-c612-4ea6-b318-4ff47f8532f0\") " pod="openshift-marketplace/certified-operators-qg74c"
Dec 05 14:16:10 crc kubenswrapper[4784]: I1205 14:16:10.540615 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e84671d-c612-4ea6-b318-4ff47f8532f0-utilities\") pod \"certified-operators-qg74c\" (UID: \"2e84671d-c612-4ea6-b318-4ff47f8532f0\") " pod="openshift-marketplace/certified-operators-qg74c"
Dec 05 14:16:10 crc kubenswrapper[4784]: I1205 14:16:10.540653 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e84671d-c612-4ea6-b318-4ff47f8532f0-catalog-content\") pod \"certified-operators-qg74c\" (UID: \"2e84671d-c612-4ea6-b318-4ff47f8532f0\") " pod="openshift-marketplace/certified-operators-qg74c"
Dec 05 14:16:10 crc kubenswrapper[4784]: I1205 14:16:10.541159 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e84671d-c612-4ea6-b318-4ff47f8532f0-catalog-content\") pod \"certified-operators-qg74c\" (UID: \"2e84671d-c612-4ea6-b318-4ff47f8532f0\") " pod="openshift-marketplace/certified-operators-qg74c"
Dec 05 14:16:10 crc kubenswrapper[4784]: I1205 14:16:10.541381 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e84671d-c612-4ea6-b318-4ff47f8532f0-utilities\") pod \"certified-operators-qg74c\" (UID: \"2e84671d-c612-4ea6-b318-4ff47f8532f0\") " pod="openshift-marketplace/certified-operators-qg74c"
Dec 05 14:16:10 crc kubenswrapper[4784]: I1205 14:16:10.562398 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-469ww\" (UniqueName: \"kubernetes.io/projected/2e84671d-c612-4ea6-b318-4ff47f8532f0-kube-api-access-469ww\") pod \"certified-operators-qg74c\" (UID: \"2e84671d-c612-4ea6-b318-4ff47f8532f0\") " pod="openshift-marketplace/certified-operators-qg74c"
Dec 05 14:16:10 crc kubenswrapper[4784]: I1205 14:16:10.663846 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qg74c"
Dec 05 14:16:11 crc kubenswrapper[4784]: I1205 14:16:11.222447 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qg74c"]
Dec 05 14:16:11 crc kubenswrapper[4784]: I1205 14:16:11.714906 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-2gq5g"]
Dec 05 14:16:11 crc kubenswrapper[4784]: I1205 14:16:11.717422 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2gq5g"
Dec 05 14:16:11 crc kubenswrapper[4784]: I1205 14:16:11.730053 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2gq5g"]
Dec 05 14:16:11 crc kubenswrapper[4784]: I1205 14:16:11.781745 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c125123b-b8ef-4cef-90df-c56b4300efa5-utilities\") pod \"community-operators-2gq5g\" (UID: \"c125123b-b8ef-4cef-90df-c56b4300efa5\") " pod="openshift-marketplace/community-operators-2gq5g"
Dec 05 14:16:11 crc kubenswrapper[4784]: I1205 14:16:11.782050 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c125123b-b8ef-4cef-90df-c56b4300efa5-catalog-content\") pod \"community-operators-2gq5g\" (UID: \"c125123b-b8ef-4cef-90df-c56b4300efa5\") " pod="openshift-marketplace/community-operators-2gq5g"
Dec 05 14:16:11 crc kubenswrapper[4784]: I1205 14:16:11.782139 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8vw2v\" (UniqueName: \"kubernetes.io/projected/c125123b-b8ef-4cef-90df-c56b4300efa5-kube-api-access-8vw2v\") pod \"community-operators-2gq5g\" (UID: \"c125123b-b8ef-4cef-90df-c56b4300efa5\") " pod="openshift-marketplace/community-operators-2gq5g"
Dec 05 14:16:11 crc kubenswrapper[4784]: I1205 14:16:11.885335 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c125123b-b8ef-4cef-90df-c56b4300efa5-utilities\") pod \"community-operators-2gq5g\" (UID: \"c125123b-b8ef-4cef-90df-c56b4300efa5\") " pod="openshift-marketplace/community-operators-2gq5g"
Dec 05 14:16:11 crc kubenswrapper[4784]: I1205 14:16:11.885471 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c125123b-b8ef-4cef-90df-c56b4300efa5-catalog-content\") pod \"community-operators-2gq5g\" (UID: \"c125123b-b8ef-4cef-90df-c56b4300efa5\") " pod="openshift-marketplace/community-operators-2gq5g"
Dec 05 14:16:11 crc kubenswrapper[4784]: I1205 14:16:11.885520 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8vw2v\" (UniqueName: \"kubernetes.io/projected/c125123b-b8ef-4cef-90df-c56b4300efa5-kube-api-access-8vw2v\") pod \"community-operators-2gq5g\" (UID: \"c125123b-b8ef-4cef-90df-c56b4300efa5\") " pod="openshift-marketplace/community-operators-2gq5g"
Dec 05 14:16:11 crc kubenswrapper[4784]: I1205 14:16:11.886477 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c125123b-b8ef-4cef-90df-c56b4300efa5-utilities\") pod \"community-operators-2gq5g\" (UID: \"c125123b-b8ef-4cef-90df-c56b4300efa5\") " pod="openshift-marketplace/community-operators-2gq5g"
Dec 05 14:16:11 crc kubenswrapper[4784]: I1205 14:16:11.886762 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c125123b-b8ef-4cef-90df-c56b4300efa5-catalog-content\") pod \"community-operators-2gq5g\" (UID: \"c125123b-b8ef-4cef-90df-c56b4300efa5\") " pod="openshift-marketplace/community-operators-2gq5g"
Dec 05 14:16:11 crc kubenswrapper[4784]: I1205 14:16:11.907164 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8vw2v\" (UniqueName: \"kubernetes.io/projected/c125123b-b8ef-4cef-90df-c56b4300efa5-kube-api-access-8vw2v\") pod \"community-operators-2gq5g\" (UID: \"c125123b-b8ef-4cef-90df-c56b4300efa5\") " pod="openshift-marketplace/community-operators-2gq5g"
Dec 05 14:16:12 crc kubenswrapper[4784]: I1205 14:16:12.049754 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2gq5g"
Dec 05 14:16:12 crc kubenswrapper[4784]: I1205 14:16:12.067898 4784 generic.go:334] "Generic (PLEG): container finished" podID="2e84671d-c612-4ea6-b318-4ff47f8532f0" containerID="3a6143763b51ec828e6131f63324f6777107401791d7d72738cf7a507bee5a71" exitCode=0
Dec 05 14:16:12 crc kubenswrapper[4784]: I1205 14:16:12.067950 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qg74c" event={"ID":"2e84671d-c612-4ea6-b318-4ff47f8532f0","Type":"ContainerDied","Data":"3a6143763b51ec828e6131f63324f6777107401791d7d72738cf7a507bee5a71"}
Dec 05 14:16:12 crc kubenswrapper[4784]: I1205 14:16:12.067981 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qg74c" event={"ID":"2e84671d-c612-4ea6-b318-4ff47f8532f0","Type":"ContainerStarted","Data":"7d90488645e15708e3012cf607709229a3ec2925ee0edf4538abb75b33cc7cf3"}
Dec 05 14:16:12 crc kubenswrapper[4784]: I1205 14:16:12.678337 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2gq5g"]
Dec 05 14:16:13 crc kubenswrapper[4784]: I1205 14:16:13.079964 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qg74c" event={"ID":"2e84671d-c612-4ea6-b318-4ff47f8532f0","Type":"ContainerStarted","Data":"f922f5b9bf82f2666c7aff7fb0d49ab3c16219234687d6506fe6ce80d14c226f"}
Dec 05 14:16:13 crc kubenswrapper[4784]: I1205 14:16:13.081216 4784 generic.go:334] "Generic (PLEG): container finished" podID="c125123b-b8ef-4cef-90df-c56b4300efa5" containerID="2915715a778954b200070cbb4e150d0be78b9f4e8c15a61df6149f9f98f80cdc" exitCode=0
Dec 05 14:16:13 crc kubenswrapper[4784]: I1205 14:16:13.081255 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2gq5g" event={"ID":"c125123b-b8ef-4cef-90df-c56b4300efa5","Type":"ContainerDied","Data":"2915715a778954b200070cbb4e150d0be78b9f4e8c15a61df6149f9f98f80cdc"}
Dec 05 14:16:13 crc kubenswrapper[4784]: I1205 14:16:13.081296 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2gq5g" event={"ID":"c125123b-b8ef-4cef-90df-c56b4300efa5","Type":"ContainerStarted","Data":"8c17a877956828351a47e9efed1ed2e3ab25771261323a7e0876d6c46343b9b9"}
Dec 05 14:16:14 crc kubenswrapper[4784]: I1205 14:16:14.093852 4784 generic.go:334] "Generic (PLEG): container finished" podID="2e84671d-c612-4ea6-b318-4ff47f8532f0" containerID="f922f5b9bf82f2666c7aff7fb0d49ab3c16219234687d6506fe6ce80d14c226f" exitCode=0
Dec 05 14:16:14 crc kubenswrapper[4784]: I1205 14:16:14.093923 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qg74c" event={"ID":"2e84671d-c612-4ea6-b318-4ff47f8532f0","Type":"ContainerDied","Data":"f922f5b9bf82f2666c7aff7fb0d49ab3c16219234687d6506fe6ce80d14c226f"}
Dec 05 14:16:14 crc kubenswrapper[4784]: I1205 14:16:14.096608 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2gq5g" event={"ID":"c125123b-b8ef-4cef-90df-c56b4300efa5","Type":"ContainerStarted","Data":"80820cb53eb2b1a619fb6208a5734c884ac3f382b791f2cbbd8e23c67b136459"}
Dec 05 14:16:15 crc kubenswrapper[4784]: I1205 14:16:15.116175 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qg74c" event={"ID":"2e84671d-c612-4ea6-b318-4ff47f8532f0","Type":"ContainerStarted","Data":"b3f6ad0d5060fa77f1edace9602adcadb742da3d82175baf461e5dea2dc3117c"}
Dec 05 14:16:15 crc kubenswrapper[4784]: I1205 14:16:15.121630 4784 generic.go:334] "Generic (PLEG): container finished" podID="c125123b-b8ef-4cef-90df-c56b4300efa5" containerID="80820cb53eb2b1a619fb6208a5734c884ac3f382b791f2cbbd8e23c67b136459" exitCode=0
Dec 05 14:16:15 crc kubenswrapper[4784]: I1205 14:16:15.121720 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2gq5g" event={"ID":"c125123b-b8ef-4cef-90df-c56b4300efa5","Type":"ContainerDied","Data":"80820cb53eb2b1a619fb6208a5734c884ac3f382b791f2cbbd8e23c67b136459"}
Dec 05 14:16:15 crc kubenswrapper[4784]: I1205 14:16:15.146727 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-qg74c" podStartSLOduration=2.743116278 podStartE2EDuration="5.146701832s" podCreationTimestamp="2025-12-05 14:16:10 +0000 UTC" firstStartedPulling="2025-12-05 14:16:12.084084454 +0000 UTC m=+6651.504151259" lastFinishedPulling="2025-12-05 14:16:14.487669988 +0000 UTC m=+6653.907736813" observedRunningTime="2025-12-05 14:16:15.136898416 +0000 UTC m=+6654.556965271" watchObservedRunningTime="2025-12-05 14:16:15.146701832 +0000 UTC m=+6654.566768647"
Dec 05 14:16:16 crc kubenswrapper[4784]: I1205 14:16:16.133985 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2gq5g" event={"ID":"c125123b-b8ef-4cef-90df-c56b4300efa5","Type":"ContainerStarted","Data":"7d631bdc42cd8db6c8ca0b1f83394e66ca115990df531dc38d74c8d4d7355e68"}
Dec 05 14:16:16 crc kubenswrapper[4784]: I1205 14:16:16.151812 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-2gq5g"
podStartSLOduration=2.706765283 podStartE2EDuration="5.151795079s" podCreationTimestamp="2025-12-05 14:16:11 +0000 UTC" firstStartedPulling="2025-12-05 14:16:13.090772011 +0000 UTC m=+6652.510838826" lastFinishedPulling="2025-12-05 14:16:15.535801797 +0000 UTC m=+6654.955868622" observedRunningTime="2025-12-05 14:16:16.150033654 +0000 UTC m=+6655.570100469" watchObservedRunningTime="2025-12-05 14:16:16.151795079 +0000 UTC m=+6655.571861894" Dec 05 14:16:17 crc kubenswrapper[4784]: I1205 14:16:17.999577 4784 scope.go:117] "RemoveContainer" containerID="30bb470db62f03f9ad108e27b3986b159c5260ee961be043b95823d3a75eb7f2" Dec 05 14:16:18 crc kubenswrapper[4784]: E1205 14:16:18.000160 4784 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sx8lm_openshift-machine-config-operator(be412f31-7a36-4811-8914-be8cdc987d08)\"" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" Dec 05 14:16:20 crc kubenswrapper[4784]: I1205 14:16:20.664739 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-qg74c" Dec 05 14:16:20 crc kubenswrapper[4784]: I1205 14:16:20.665131 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-qg74c" Dec 05 14:16:20 crc kubenswrapper[4784]: I1205 14:16:20.728130 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-qg74c" Dec 05 14:16:21 crc kubenswrapper[4784]: I1205 14:16:21.223012 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-qg74c" Dec 05 14:16:21 crc kubenswrapper[4784]: I1205 14:16:21.899795 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qg74c"] Dec 05 14:16:22 crc kubenswrapper[4784]: I1205 14:16:22.050282 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-2gq5g" Dec 05 14:16:22 crc kubenswrapper[4784]: I1205 14:16:22.050343 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-2gq5g" Dec 05 14:16:22 crc kubenswrapper[4784]: I1205 14:16:22.101797 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-2gq5g" Dec 05 14:16:22 crc kubenswrapper[4784]: I1205 14:16:22.234148 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-2gq5g" Dec 05 14:16:23 crc kubenswrapper[4784]: I1205 14:16:23.194844 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-qg74c" podUID="2e84671d-c612-4ea6-b318-4ff47f8532f0" containerName="registry-server" containerID="cri-o://b3f6ad0d5060fa77f1edace9602adcadb742da3d82175baf461e5dea2dc3117c" gracePeriod=2 Dec 05 14:16:23 crc kubenswrapper[4784]: I1205 14:16:23.653030 4784 util.go:48] "No ready sandbox for pod can be found. 
Dec 05 14:16:23 crc kubenswrapper[4784]: I1205 14:16:23.749100 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e84671d-c612-4ea6-b318-4ff47f8532f0-utilities\") pod \"2e84671d-c612-4ea6-b318-4ff47f8532f0\" (UID: \"2e84671d-c612-4ea6-b318-4ff47f8532f0\") "
Dec 05 14:16:23 crc kubenswrapper[4784]: I1205 14:16:23.749369 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-469ww\" (UniqueName: \"kubernetes.io/projected/2e84671d-c612-4ea6-b318-4ff47f8532f0-kube-api-access-469ww\") pod \"2e84671d-c612-4ea6-b318-4ff47f8532f0\" (UID: \"2e84671d-c612-4ea6-b318-4ff47f8532f0\") "
Dec 05 14:16:23 crc kubenswrapper[4784]: I1205 14:16:23.749553 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e84671d-c612-4ea6-b318-4ff47f8532f0-catalog-content\") pod \"2e84671d-c612-4ea6-b318-4ff47f8532f0\" (UID: \"2e84671d-c612-4ea6-b318-4ff47f8532f0\") "
Dec 05 14:16:23 crc kubenswrapper[4784]: I1205 14:16:23.751262 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2e84671d-c612-4ea6-b318-4ff47f8532f0-utilities" (OuterVolumeSpecName: "utilities") pod "2e84671d-c612-4ea6-b318-4ff47f8532f0" (UID: "2e84671d-c612-4ea6-b318-4ff47f8532f0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 14:16:23 crc kubenswrapper[4784]: I1205 14:16:23.760247 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2e84671d-c612-4ea6-b318-4ff47f8532f0-kube-api-access-469ww" (OuterVolumeSpecName: "kube-api-access-469ww") pod "2e84671d-c612-4ea6-b318-4ff47f8532f0" (UID: "2e84671d-c612-4ea6-b318-4ff47f8532f0"). InnerVolumeSpecName "kube-api-access-469ww". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 14:16:23 crc kubenswrapper[4784]: I1205 14:16:23.826683 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2e84671d-c612-4ea6-b318-4ff47f8532f0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2e84671d-c612-4ea6-b318-4ff47f8532f0" (UID: "2e84671d-c612-4ea6-b318-4ff47f8532f0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 14:16:23 crc kubenswrapper[4784]: I1205 14:16:23.852368 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-469ww\" (UniqueName: \"kubernetes.io/projected/2e84671d-c612-4ea6-b318-4ff47f8532f0-kube-api-access-469ww\") on node \"crc\" DevicePath \"\""
Dec 05 14:16:23 crc kubenswrapper[4784]: I1205 14:16:23.852405 4784 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e84671d-c612-4ea6-b318-4ff47f8532f0-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 14:16:23 crc kubenswrapper[4784]: I1205 14:16:23.852414 4784 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e84671d-c612-4ea6-b318-4ff47f8532f0-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 14:16:24 crc kubenswrapper[4784]: I1205 14:16:24.212095 4784 generic.go:334] "Generic (PLEG): container finished" podID="2e84671d-c612-4ea6-b318-4ff47f8532f0" containerID="b3f6ad0d5060fa77f1edace9602adcadb742da3d82175baf461e5dea2dc3117c" exitCode=0
Dec 05 14:16:24 crc kubenswrapper[4784]: I1205 14:16:24.212161 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qg74c" event={"ID":"2e84671d-c612-4ea6-b318-4ff47f8532f0","Type":"ContainerDied","Data":"b3f6ad0d5060fa77f1edace9602adcadb742da3d82175baf461e5dea2dc3117c"}
Dec 05 14:16:24 crc kubenswrapper[4784]: I1205 14:16:24.212244 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qg74c" event={"ID":"2e84671d-c612-4ea6-b318-4ff47f8532f0","Type":"ContainerDied","Data":"7d90488645e15708e3012cf607709229a3ec2925ee0edf4538abb75b33cc7cf3"}
Dec 05 14:16:24 crc kubenswrapper[4784]: I1205 14:16:24.212288 4784 scope.go:117] "RemoveContainer" containerID="b3f6ad0d5060fa77f1edace9602adcadb742da3d82175baf461e5dea2dc3117c"
Dec 05 14:16:24 crc kubenswrapper[4784]: I1205 14:16:24.212498 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qg74c"
Dec 05 14:16:24 crc kubenswrapper[4784]: I1205 14:16:24.258442 4784 scope.go:117] "RemoveContainer" containerID="f922f5b9bf82f2666c7aff7fb0d49ab3c16219234687d6506fe6ce80d14c226f"
Dec 05 14:16:24 crc kubenswrapper[4784]: I1205 14:16:24.281201 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qg74c"]
Dec 05 14:16:24 crc kubenswrapper[4784]: I1205 14:16:24.285104 4784 scope.go:117] "RemoveContainer" containerID="3a6143763b51ec828e6131f63324f6777107401791d7d72738cf7a507bee5a71"
Dec 05 14:16:24 crc kubenswrapper[4784]: I1205 14:16:24.300139 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-qg74c"]
Dec 05 14:16:24 crc kubenswrapper[4784]: I1205 14:16:24.338076 4784 scope.go:117] "RemoveContainer" containerID="b3f6ad0d5060fa77f1edace9602adcadb742da3d82175baf461e5dea2dc3117c"
Dec 05 14:16:24 crc kubenswrapper[4784]: E1205 14:16:24.338492 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b3f6ad0d5060fa77f1edace9602adcadb742da3d82175baf461e5dea2dc3117c\": container with ID starting with b3f6ad0d5060fa77f1edace9602adcadb742da3d82175baf461e5dea2dc3117c not found: ID does not exist" containerID="b3f6ad0d5060fa77f1edace9602adcadb742da3d82175baf461e5dea2dc3117c"
Dec 05 14:16:24 crc kubenswrapper[4784]: I1205 14:16:24.338530 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3f6ad0d5060fa77f1edace9602adcadb742da3d82175baf461e5dea2dc3117c"} err="failed to get container status \"b3f6ad0d5060fa77f1edace9602adcadb742da3d82175baf461e5dea2dc3117c\": rpc error: code = NotFound desc = could not find container \"b3f6ad0d5060fa77f1edace9602adcadb742da3d82175baf461e5dea2dc3117c\": container with ID starting with b3f6ad0d5060fa77f1edace9602adcadb742da3d82175baf461e5dea2dc3117c not found: ID does not exist"
Dec 05 14:16:24 crc kubenswrapper[4784]: I1205 14:16:24.338551 4784 scope.go:117] "RemoveContainer" containerID="f922f5b9bf82f2666c7aff7fb0d49ab3c16219234687d6506fe6ce80d14c226f"
Dec 05 14:16:24 crc kubenswrapper[4784]: E1205 14:16:24.339031 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f922f5b9bf82f2666c7aff7fb0d49ab3c16219234687d6506fe6ce80d14c226f\": container with ID starting with f922f5b9bf82f2666c7aff7fb0d49ab3c16219234687d6506fe6ce80d14c226f not found: ID does not exist" containerID="f922f5b9bf82f2666c7aff7fb0d49ab3c16219234687d6506fe6ce80d14c226f"
Dec 05 14:16:24 crc kubenswrapper[4784]: I1205 14:16:24.339058 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f922f5b9bf82f2666c7aff7fb0d49ab3c16219234687d6506fe6ce80d14c226f"} err="failed to get container status \"f922f5b9bf82f2666c7aff7fb0d49ab3c16219234687d6506fe6ce80d14c226f\": rpc error: code = NotFound desc = could not find container \"f922f5b9bf82f2666c7aff7fb0d49ab3c16219234687d6506fe6ce80d14c226f\": container with ID starting with f922f5b9bf82f2666c7aff7fb0d49ab3c16219234687d6506fe6ce80d14c226f not found: ID does not exist"
Dec 05 14:16:24 crc kubenswrapper[4784]: I1205 14:16:24.339073 4784 scope.go:117] "RemoveContainer" containerID="3a6143763b51ec828e6131f63324f6777107401791d7d72738cf7a507bee5a71"
Dec 05 14:16:24 crc kubenswrapper[4784]: E1205 14:16:24.339436 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3a6143763b51ec828e6131f63324f6777107401791d7d72738cf7a507bee5a71\": container with ID starting with 3a6143763b51ec828e6131f63324f6777107401791d7d72738cf7a507bee5a71 not found: ID does not exist" containerID="3a6143763b51ec828e6131f63324f6777107401791d7d72738cf7a507bee5a71"
Dec 05 14:16:24 crc kubenswrapper[4784]: I1205 14:16:24.339481 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a6143763b51ec828e6131f63324f6777107401791d7d72738cf7a507bee5a71"} err="failed to get container status \"3a6143763b51ec828e6131f63324f6777107401791d7d72738cf7a507bee5a71\": rpc error: code = NotFound desc = could not find container \"3a6143763b51ec828e6131f63324f6777107401791d7d72738cf7a507bee5a71\": container with ID starting with 3a6143763b51ec828e6131f63324f6777107401791d7d72738cf7a507bee5a71 not found: ID does not exist"
Dec 05 14:16:24 crc kubenswrapper[4784]: I1205 14:16:24.502943 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2gq5g"]
Dec 05 14:16:24 crc kubenswrapper[4784]: I1205 14:16:24.503167 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-2gq5g" podUID="c125123b-b8ef-4cef-90df-c56b4300efa5" containerName="registry-server" containerID="cri-o://7d631bdc42cd8db6c8ca0b1f83394e66ca115990df531dc38d74c8d4d7355e68" gracePeriod=2
Dec 05 14:16:25 crc kubenswrapper[4784]: I1205 14:16:25.015172 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2e84671d-c612-4ea6-b318-4ff47f8532f0" path="/var/lib/kubelet/pods/2e84671d-c612-4ea6-b318-4ff47f8532f0/volumes"
Dec 05 14:16:25 crc kubenswrapper[4784]: I1205 14:16:25.244254 4784 generic.go:334] "Generic (PLEG): container finished" podID="c125123b-b8ef-4cef-90df-c56b4300efa5" containerID="7d631bdc42cd8db6c8ca0b1f83394e66ca115990df531dc38d74c8d4d7355e68" exitCode=0
Dec 05 14:16:25 crc kubenswrapper[4784]: I1205 14:16:25.244309 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2gq5g" event={"ID":"c125123b-b8ef-4cef-90df-c56b4300efa5","Type":"ContainerDied","Data":"7d631bdc42cd8db6c8ca0b1f83394e66ca115990df531dc38d74c8d4d7355e68"}
Dec 05 14:16:25 crc kubenswrapper[4784]: I1205 14:16:25.490998 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2gq5g"
Dec 05 14:16:25 crc kubenswrapper[4784]: I1205 14:16:25.589621 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c125123b-b8ef-4cef-90df-c56b4300efa5-catalog-content\") pod \"c125123b-b8ef-4cef-90df-c56b4300efa5\" (UID: \"c125123b-b8ef-4cef-90df-c56b4300efa5\") "
Dec 05 14:16:25 crc kubenswrapper[4784]: I1205 14:16:25.589774 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8vw2v\" (UniqueName: \"kubernetes.io/projected/c125123b-b8ef-4cef-90df-c56b4300efa5-kube-api-access-8vw2v\") pod \"c125123b-b8ef-4cef-90df-c56b4300efa5\" (UID: \"c125123b-b8ef-4cef-90df-c56b4300efa5\") "
Dec 05 14:16:25 crc kubenswrapper[4784]: I1205 14:16:25.589889 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c125123b-b8ef-4cef-90df-c56b4300efa5-utilities\") pod \"c125123b-b8ef-4cef-90df-c56b4300efa5\" (UID: \"c125123b-b8ef-4cef-90df-c56b4300efa5\") "
Dec 05 14:16:25 crc kubenswrapper[4784]: I1205 14:16:25.590946 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c125123b-b8ef-4cef-90df-c56b4300efa5-utilities" (OuterVolumeSpecName: "utilities") pod "c125123b-b8ef-4cef-90df-c56b4300efa5" (UID: "c125123b-b8ef-4cef-90df-c56b4300efa5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 14:16:25 crc kubenswrapper[4784]: I1205 14:16:25.600013 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c125123b-b8ef-4cef-90df-c56b4300efa5-kube-api-access-8vw2v" (OuterVolumeSpecName: "kube-api-access-8vw2v") pod "c125123b-b8ef-4cef-90df-c56b4300efa5" (UID: "c125123b-b8ef-4cef-90df-c56b4300efa5"). InnerVolumeSpecName "kube-api-access-8vw2v". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 14:16:25 crc kubenswrapper[4784]: I1205 14:16:25.642761 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c125123b-b8ef-4cef-90df-c56b4300efa5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c125123b-b8ef-4cef-90df-c56b4300efa5" (UID: "c125123b-b8ef-4cef-90df-c56b4300efa5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 14:16:25 crc kubenswrapper[4784]: I1205 14:16:25.692435 4784 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c125123b-b8ef-4cef-90df-c56b4300efa5-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 14:16:25 crc kubenswrapper[4784]: I1205 14:16:25.692465 4784 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c125123b-b8ef-4cef-90df-c56b4300efa5-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 14:16:25 crc kubenswrapper[4784]: I1205 14:16:25.692479 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8vw2v\" (UniqueName: \"kubernetes.io/projected/c125123b-b8ef-4cef-90df-c56b4300efa5-kube-api-access-8vw2v\") on node \"crc\" DevicePath \"\"" Dec 05 14:16:26 crc kubenswrapper[4784]: I1205 14:16:26.255152 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2gq5g" event={"ID":"c125123b-b8ef-4cef-90df-c56b4300efa5","Type":"ContainerDied","Data":"8c17a877956828351a47e9efed1ed2e3ab25771261323a7e0876d6c46343b9b9"} Dec 05 14:16:26 crc kubenswrapper[4784]: I1205 14:16:26.255228 4784 scope.go:117] "RemoveContainer" containerID="7d631bdc42cd8db6c8ca0b1f83394e66ca115990df531dc38d74c8d4d7355e68" Dec 05 14:16:26 crc kubenswrapper[4784]: I1205 14:16:26.255228 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2gq5g" Dec 05 14:16:26 crc kubenswrapper[4784]: I1205 14:16:26.278852 4784 scope.go:117] "RemoveContainer" containerID="80820cb53eb2b1a619fb6208a5734c884ac3f382b791f2cbbd8e23c67b136459" Dec 05 14:16:26 crc kubenswrapper[4784]: I1205 14:16:26.303173 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2gq5g"] Dec 05 14:16:26 crc kubenswrapper[4784]: I1205 14:16:26.315111 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-2gq5g"] Dec 05 14:16:26 crc kubenswrapper[4784]: I1205 14:16:26.326684 4784 scope.go:117] "RemoveContainer" containerID="2915715a778954b200070cbb4e150d0be78b9f4e8c15a61df6149f9f98f80cdc" Dec 05 14:16:27 crc kubenswrapper[4784]: I1205 14:16:27.011962 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c125123b-b8ef-4cef-90df-c56b4300efa5" path="/var/lib/kubelet/pods/c125123b-b8ef-4cef-90df-c56b4300efa5/volumes" Dec 05 14:16:30 crc kubenswrapper[4784]: I1205 14:16:30.002152 4784 scope.go:117] "RemoveContainer" containerID="30bb470db62f03f9ad108e27b3986b159c5260ee961be043b95823d3a75eb7f2" Dec 05 14:16:30 crc kubenswrapper[4784]: I1205 14:16:30.304395 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerStarted","Data":"9d7437b11bd20c6f0cf3a6baeacf4a1fca641265e6807e20d74aa61337dd4734"} Dec 05 14:16:44 crc kubenswrapper[4784]: I1205 14:16:44.456784 4784 generic.go:334] "Generic (PLEG): container finished" podID="a7110865-6dae-444b-8077-bb2d8cb2fc60" containerID="15f88dc54641fabccbda6a07ba0e6ccf47e50a97fc550e5b1a431b3b95dacd63" exitCode=0 Dec 05 14:16:44 crc kubenswrapper[4784]: I1205 14:16:44.456892 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-5w2dk/must-gather-k4sxt" 
event={"ID":"a7110865-6dae-444b-8077-bb2d8cb2fc60","Type":"ContainerDied","Data":"15f88dc54641fabccbda6a07ba0e6ccf47e50a97fc550e5b1a431b3b95dacd63"} Dec 05 14:16:44 crc kubenswrapper[4784]: I1205 14:16:44.458059 4784 scope.go:117] "RemoveContainer" containerID="15f88dc54641fabccbda6a07ba0e6ccf47e50a97fc550e5b1a431b3b95dacd63" Dec 05 14:16:44 crc kubenswrapper[4784]: I1205 14:16:44.644061 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-5w2dk_must-gather-k4sxt_a7110865-6dae-444b-8077-bb2d8cb2fc60/gather/0.log" Dec 05 14:16:55 crc kubenswrapper[4784]: I1205 14:16:55.847641 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-5w2dk/must-gather-k4sxt"] Dec 05 14:16:55 crc kubenswrapper[4784]: I1205 14:16:55.848453 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-5w2dk/must-gather-k4sxt" podUID="a7110865-6dae-444b-8077-bb2d8cb2fc60" containerName="copy" containerID="cri-o://75128942c38cb68b3182fd616e686f3401d36403685b30dca9c34ba8537b0e7f" gracePeriod=2 Dec 05 14:16:55 crc kubenswrapper[4784]: I1205 14:16:55.857872 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-5w2dk/must-gather-k4sxt"] Dec 05 14:16:56 crc kubenswrapper[4784]: I1205 14:16:56.307404 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-5w2dk_must-gather-k4sxt_a7110865-6dae-444b-8077-bb2d8cb2fc60/copy/0.log" Dec 05 14:16:56 crc kubenswrapper[4784]: I1205 14:16:56.308113 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-5w2dk/must-gather-k4sxt" Dec 05 14:16:56 crc kubenswrapper[4784]: I1205 14:16:56.431075 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ltx7c\" (UniqueName: \"kubernetes.io/projected/a7110865-6dae-444b-8077-bb2d8cb2fc60-kube-api-access-ltx7c\") pod \"a7110865-6dae-444b-8077-bb2d8cb2fc60\" (UID: \"a7110865-6dae-444b-8077-bb2d8cb2fc60\") " Dec 05 14:16:56 crc kubenswrapper[4784]: I1205 14:16:56.431407 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a7110865-6dae-444b-8077-bb2d8cb2fc60-must-gather-output\") pod \"a7110865-6dae-444b-8077-bb2d8cb2fc60\" (UID: \"a7110865-6dae-444b-8077-bb2d8cb2fc60\") " Dec 05 14:16:56 crc kubenswrapper[4784]: I1205 14:16:56.438876 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7110865-6dae-444b-8077-bb2d8cb2fc60-kube-api-access-ltx7c" (OuterVolumeSpecName: "kube-api-access-ltx7c") pod "a7110865-6dae-444b-8077-bb2d8cb2fc60" (UID: "a7110865-6dae-444b-8077-bb2d8cb2fc60"). InnerVolumeSpecName "kube-api-access-ltx7c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:16:56 crc kubenswrapper[4784]: I1205 14:16:56.534648 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ltx7c\" (UniqueName: \"kubernetes.io/projected/a7110865-6dae-444b-8077-bb2d8cb2fc60-kube-api-access-ltx7c\") on node \"crc\" DevicePath \"\"" Dec 05 14:16:56 crc kubenswrapper[4784]: I1205 14:16:56.579994 4784 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-5w2dk_must-gather-k4sxt_a7110865-6dae-444b-8077-bb2d8cb2fc60/copy/0.log" Dec 05 14:16:56 crc kubenswrapper[4784]: I1205 14:16:56.580868 4784 generic.go:334] "Generic (PLEG): container finished" podID="a7110865-6dae-444b-8077-bb2d8cb2fc60" containerID="75128942c38cb68b3182fd616e686f3401d36403685b30dca9c34ba8537b0e7f" exitCode=143 Dec 05 14:16:56 crc kubenswrapper[4784]: I1205 14:16:56.580922 4784 scope.go:117] "RemoveContainer" containerID="75128942c38cb68b3182fd616e686f3401d36403685b30dca9c34ba8537b0e7f" Dec 05 14:16:56 crc kubenswrapper[4784]: I1205 14:16:56.580940 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-5w2dk/must-gather-k4sxt" Dec 05 14:16:56 crc kubenswrapper[4784]: I1205 14:16:56.603042 4784 scope.go:117] "RemoveContainer" containerID="15f88dc54641fabccbda6a07ba0e6ccf47e50a97fc550e5b1a431b3b95dacd63" Dec 05 14:16:56 crc kubenswrapper[4784]: I1205 14:16:56.631840 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a7110865-6dae-444b-8077-bb2d8cb2fc60-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "a7110865-6dae-444b-8077-bb2d8cb2fc60" (UID: "a7110865-6dae-444b-8077-bb2d8cb2fc60"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 14:16:56 crc kubenswrapper[4784]: I1205 14:16:56.636666 4784 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a7110865-6dae-444b-8077-bb2d8cb2fc60-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 05 14:16:56 crc kubenswrapper[4784]: I1205 14:16:56.686849 4784 scope.go:117] "RemoveContainer" containerID="75128942c38cb68b3182fd616e686f3401d36403685b30dca9c34ba8537b0e7f" Dec 05 14:16:56 crc kubenswrapper[4784]: E1205 14:16:56.687356 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"75128942c38cb68b3182fd616e686f3401d36403685b30dca9c34ba8537b0e7f\": container with ID starting with 75128942c38cb68b3182fd616e686f3401d36403685b30dca9c34ba8537b0e7f not found: ID does not exist" containerID="75128942c38cb68b3182fd616e686f3401d36403685b30dca9c34ba8537b0e7f" Dec 05 14:16:56 crc kubenswrapper[4784]: I1205 14:16:56.687393 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75128942c38cb68b3182fd616e686f3401d36403685b30dca9c34ba8537b0e7f"} err="failed to get container status \"75128942c38cb68b3182fd616e686f3401d36403685b30dca9c34ba8537b0e7f\": rpc error: code = NotFound desc = could not find container \"75128942c38cb68b3182fd616e686f3401d36403685b30dca9c34ba8537b0e7f\": container with ID starting with 75128942c38cb68b3182fd616e686f3401d36403685b30dca9c34ba8537b0e7f not found: ID does not exist" Dec 05 14:16:56 crc kubenswrapper[4784]: I1205 14:16:56.687419 4784 scope.go:117] "RemoveContainer" containerID="15f88dc54641fabccbda6a07ba0e6ccf47e50a97fc550e5b1a431b3b95dacd63" Dec 05 14:16:56 crc 
kubenswrapper[4784]: E1205 14:16:56.687727 4784 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"15f88dc54641fabccbda6a07ba0e6ccf47e50a97fc550e5b1a431b3b95dacd63\": container with ID starting with 15f88dc54641fabccbda6a07ba0e6ccf47e50a97fc550e5b1a431b3b95dacd63 not found: ID does not exist" containerID="15f88dc54641fabccbda6a07ba0e6ccf47e50a97fc550e5b1a431b3b95dacd63" Dec 05 14:16:56 crc kubenswrapper[4784]: I1205 14:16:56.687847 4784 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15f88dc54641fabccbda6a07ba0e6ccf47e50a97fc550e5b1a431b3b95dacd63"} err="failed to get container status \"15f88dc54641fabccbda6a07ba0e6ccf47e50a97fc550e5b1a431b3b95dacd63\": rpc error: code = NotFound desc = could not find container \"15f88dc54641fabccbda6a07ba0e6ccf47e50a97fc550e5b1a431b3b95dacd63\": container with ID starting with 15f88dc54641fabccbda6a07ba0e6ccf47e50a97fc550e5b1a431b3b95dacd63 not found: ID does not exist" Dec 05 14:16:57 crc kubenswrapper[4784]: I1205 14:16:57.011116 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a7110865-6dae-444b-8077-bb2d8cb2fc60" path="/var/lib/kubelet/pods/a7110865-6dae-444b-8077-bb2d8cb2fc60/volumes" Dec 05 14:18:41 crc kubenswrapper[4784]: I1205 14:18:41.512257 4784 scope.go:117] "RemoveContainer" containerID="823dd0b7d810f5cf6155288cb86ba47e939abad6dd9bf6b3fa0cfa792dade736" Dec 05 14:18:59 crc kubenswrapper[4784]: I1205 14:18:59.572291 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 14:18:59 crc kubenswrapper[4784]: I1205 14:18:59.572762 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 14:19:23 crc kubenswrapper[4784]: I1205 14:19:23.490613 4784 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-jht8c"] Dec 05 14:19:23 crc kubenswrapper[4784]: E1205 14:19:23.494470 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7110865-6dae-444b-8077-bb2d8cb2fc60" containerName="copy" Dec 05 14:19:23 crc kubenswrapper[4784]: I1205 14:19:23.494536 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7110865-6dae-444b-8077-bb2d8cb2fc60" containerName="copy" Dec 05 14:19:23 crc kubenswrapper[4784]: E1205 14:19:23.494614 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7110865-6dae-444b-8077-bb2d8cb2fc60" containerName="gather" Dec 05 14:19:23 crc kubenswrapper[4784]: I1205 14:19:23.494634 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7110865-6dae-444b-8077-bb2d8cb2fc60" containerName="gather" Dec 05 14:19:23 crc kubenswrapper[4784]: E1205 14:19:23.494666 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e84671d-c612-4ea6-b318-4ff47f8532f0" containerName="extract-utilities" Dec 05 14:19:23 crc kubenswrapper[4784]: I1205 14:19:23.494682 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e84671d-c612-4ea6-b318-4ff47f8532f0" containerName="extract-utilities" Dec 05 14:19:23 
crc kubenswrapper[4784]: E1205 14:19:23.494722 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e84671d-c612-4ea6-b318-4ff47f8532f0" containerName="registry-server" Dec 05 14:19:23 crc kubenswrapper[4784]: I1205 14:19:23.494740 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e84671d-c612-4ea6-b318-4ff47f8532f0" containerName="registry-server" Dec 05 14:19:23 crc kubenswrapper[4784]: E1205 14:19:23.494774 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e84671d-c612-4ea6-b318-4ff47f8532f0" containerName="extract-content" Dec 05 14:19:23 crc kubenswrapper[4784]: I1205 14:19:23.494790 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e84671d-c612-4ea6-b318-4ff47f8532f0" containerName="extract-content" Dec 05 14:19:23 crc kubenswrapper[4784]: E1205 14:19:23.494818 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c125123b-b8ef-4cef-90df-c56b4300efa5" containerName="registry-server" Dec 05 14:19:23 crc kubenswrapper[4784]: I1205 14:19:23.494834 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="c125123b-b8ef-4cef-90df-c56b4300efa5" containerName="registry-server" Dec 05 14:19:23 crc kubenswrapper[4784]: E1205 14:19:23.494867 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c125123b-b8ef-4cef-90df-c56b4300efa5" containerName="extract-utilities" Dec 05 14:19:23 crc kubenswrapper[4784]: I1205 14:19:23.494882 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="c125123b-b8ef-4cef-90df-c56b4300efa5" containerName="extract-utilities" Dec 05 14:19:23 crc kubenswrapper[4784]: E1205 14:19:23.494926 4784 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c125123b-b8ef-4cef-90df-c56b4300efa5" containerName="extract-content" Dec 05 14:19:23 crc kubenswrapper[4784]: I1205 14:19:23.494942 4784 state_mem.go:107] "Deleted CPUSet assignment" podUID="c125123b-b8ef-4cef-90df-c56b4300efa5" containerName="extract-content" Dec 05 14:19:23 crc kubenswrapper[4784]: I1205 14:19:23.495450 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7110865-6dae-444b-8077-bb2d8cb2fc60" containerName="copy" Dec 05 14:19:23 crc kubenswrapper[4784]: I1205 14:19:23.495494 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e84671d-c612-4ea6-b318-4ff47f8532f0" containerName="registry-server" Dec 05 14:19:23 crc kubenswrapper[4784]: I1205 14:19:23.495533 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7110865-6dae-444b-8077-bb2d8cb2fc60" containerName="gather" Dec 05 14:19:23 crc kubenswrapper[4784]: I1205 14:19:23.495567 4784 memory_manager.go:354] "RemoveStaleState removing state" podUID="c125123b-b8ef-4cef-90df-c56b4300efa5" containerName="registry-server" Dec 05 14:19:23 crc kubenswrapper[4784]: I1205 14:19:23.499065 4784 util.go:30] "No sandbox for pod can be found. 
Dec 05 14:19:23 crc kubenswrapper[4784]: I1205 14:19:23.505068 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jht8c"]
Dec 05 14:19:23 crc kubenswrapper[4784]: I1205 14:19:23.681637 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6lk4s\" (UniqueName: \"kubernetes.io/projected/3458e0fc-e2b4-4bce-9926-44e8fa309bc4-kube-api-access-6lk4s\") pod \"redhat-operators-jht8c\" (UID: \"3458e0fc-e2b4-4bce-9926-44e8fa309bc4\") " pod="openshift-marketplace/redhat-operators-jht8c"
Dec 05 14:19:23 crc kubenswrapper[4784]: I1205 14:19:23.682017 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3458e0fc-e2b4-4bce-9926-44e8fa309bc4-utilities\") pod \"redhat-operators-jht8c\" (UID: \"3458e0fc-e2b4-4bce-9926-44e8fa309bc4\") " pod="openshift-marketplace/redhat-operators-jht8c"
Dec 05 14:19:23 crc kubenswrapper[4784]: I1205 14:19:23.682251 4784 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3458e0fc-e2b4-4bce-9926-44e8fa309bc4-catalog-content\") pod \"redhat-operators-jht8c\" (UID: \"3458e0fc-e2b4-4bce-9926-44e8fa309bc4\") " pod="openshift-marketplace/redhat-operators-jht8c"
Dec 05 14:19:23 crc kubenswrapper[4784]: I1205 14:19:23.784094 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3458e0fc-e2b4-4bce-9926-44e8fa309bc4-catalog-content\") pod \"redhat-operators-jht8c\" (UID: \"3458e0fc-e2b4-4bce-9926-44e8fa309bc4\") " pod="openshift-marketplace/redhat-operators-jht8c"
Dec 05 14:19:23 crc kubenswrapper[4784]: I1205 14:19:23.784552 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6lk4s\" (UniqueName: \"kubernetes.io/projected/3458e0fc-e2b4-4bce-9926-44e8fa309bc4-kube-api-access-6lk4s\") pod \"redhat-operators-jht8c\" (UID: \"3458e0fc-e2b4-4bce-9926-44e8fa309bc4\") " pod="openshift-marketplace/redhat-operators-jht8c"
Dec 05 14:19:23 crc kubenswrapper[4784]: I1205 14:19:23.784627 4784 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3458e0fc-e2b4-4bce-9926-44e8fa309bc4-utilities\") pod \"redhat-operators-jht8c\" (UID: \"3458e0fc-e2b4-4bce-9926-44e8fa309bc4\") " pod="openshift-marketplace/redhat-operators-jht8c"
Dec 05 14:19:23 crc kubenswrapper[4784]: I1205 14:19:23.785081 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3458e0fc-e2b4-4bce-9926-44e8fa309bc4-catalog-content\") pod \"redhat-operators-jht8c\" (UID: \"3458e0fc-e2b4-4bce-9926-44e8fa309bc4\") " pod="openshift-marketplace/redhat-operators-jht8c"
Dec 05 14:19:23 crc kubenswrapper[4784]: I1205 14:19:23.785173 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3458e0fc-e2b4-4bce-9926-44e8fa309bc4-utilities\") pod \"redhat-operators-jht8c\" (UID: \"3458e0fc-e2b4-4bce-9926-44e8fa309bc4\") " pod="openshift-marketplace/redhat-operators-jht8c"
Dec 05 14:19:23 crc kubenswrapper[4784]: I1205 14:19:23.814997 4784 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6lk4s\" (UniqueName: \"kubernetes.io/projected/3458e0fc-e2b4-4bce-9926-44e8fa309bc4-kube-api-access-6lk4s\") pod \"redhat-operators-jht8c\" (UID: \"3458e0fc-e2b4-4bce-9926-44e8fa309bc4\") " pod="openshift-marketplace/redhat-operators-jht8c"
Dec 05 14:19:23 crc kubenswrapper[4784]: I1205 14:19:23.841235 4784 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jht8c"
Dec 05 14:19:24 crc kubenswrapper[4784]: I1205 14:19:24.358841 4784 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jht8c"]
Dec 05 14:19:25 crc kubenswrapper[4784]: I1205 14:19:25.091844 4784 generic.go:334] "Generic (PLEG): container finished" podID="3458e0fc-e2b4-4bce-9926-44e8fa309bc4" containerID="8c17c72dde4ba065df2405136e989fd39ca6b64861b25a8b5f00e2ddea3968d0" exitCode=0
Dec 05 14:19:25 crc kubenswrapper[4784]: I1205 14:19:25.092257 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jht8c" event={"ID":"3458e0fc-e2b4-4bce-9926-44e8fa309bc4","Type":"ContainerDied","Data":"8c17c72dde4ba065df2405136e989fd39ca6b64861b25a8b5f00e2ddea3968d0"}
Dec 05 14:19:25 crc kubenswrapper[4784]: I1205 14:19:25.092299 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jht8c" event={"ID":"3458e0fc-e2b4-4bce-9926-44e8fa309bc4","Type":"ContainerStarted","Data":"a9c99d48d18a2498eb2556bb6df4903a23c1758bd6fba521a42e14e280ccb2e0"}
Dec 05 14:19:25 crc kubenswrapper[4784]: I1205 14:19:25.094859 4784 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 05 14:19:26 crc kubenswrapper[4784]: I1205 14:19:26.105401 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jht8c" event={"ID":"3458e0fc-e2b4-4bce-9926-44e8fa309bc4","Type":"ContainerStarted","Data":"95404d264b76e71f2003c53077c40fd27b7c5880e592884a4dc32b136d61fa11"}
Dec 05 14:19:27 crc kubenswrapper[4784]: I1205 14:19:27.117446 4784 generic.go:334] "Generic (PLEG): container finished" podID="3458e0fc-e2b4-4bce-9926-44e8fa309bc4" containerID="95404d264b76e71f2003c53077c40fd27b7c5880e592884a4dc32b136d61fa11" exitCode=0
Dec 05 14:19:27 crc kubenswrapper[4784]: I1205 14:19:27.117516 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jht8c" event={"ID":"3458e0fc-e2b4-4bce-9926-44e8fa309bc4","Type":"ContainerDied","Data":"95404d264b76e71f2003c53077c40fd27b7c5880e592884a4dc32b136d61fa11"}
Dec 05 14:19:28 crc kubenswrapper[4784]: I1205 14:19:28.130857 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jht8c" event={"ID":"3458e0fc-e2b4-4bce-9926-44e8fa309bc4","Type":"ContainerStarted","Data":"3ec7a1f0b96874990224c8a733e15d3086eb0ce64bad216f04b91a20683f4755"}
Dec 05 14:19:28 crc kubenswrapper[4784]: I1205 14:19:28.160714 4784 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-jht8c" podStartSLOduration=2.6923680770000002 podStartE2EDuration="5.160691081s" podCreationTimestamp="2025-12-05 14:19:23 +0000 UTC" firstStartedPulling="2025-12-05 14:19:25.094528532 +0000 UTC m=+6844.514595347" lastFinishedPulling="2025-12-05 14:19:27.562851536 +0000 UTC m=+6846.982918351" observedRunningTime="2025-12-05 14:19:28.157524232 +0000 UTC m=+6847.577591057" watchObservedRunningTime="2025-12-05 14:19:28.160691081 +0000 UTC m=+6847.580757896"
Dec 05 14:19:29 crc kubenswrapper[4784]: I1205 14:19:29.572756 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 14:19:29 crc kubenswrapper[4784]: I1205 14:19:29.573307 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 14:19:33 crc kubenswrapper[4784]: I1205 14:19:33.842141 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-jht8c"
Dec 05 14:19:33 crc kubenswrapper[4784]: I1205 14:19:33.842617 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-jht8c"
Dec 05 14:19:33 crc kubenswrapper[4784]: I1205 14:19:33.907543 4784 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-jht8c"
Dec 05 14:19:34 crc kubenswrapper[4784]: I1205 14:19:34.260351 4784 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-jht8c"
Dec 05 14:19:34 crc kubenswrapper[4784]: I1205 14:19:34.323022 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jht8c"]
Dec 05 14:19:36 crc kubenswrapper[4784]: I1205 14:19:36.218209 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-jht8c" podUID="3458e0fc-e2b4-4bce-9926-44e8fa309bc4" containerName="registry-server" containerID="cri-o://3ec7a1f0b96874990224c8a733e15d3086eb0ce64bad216f04b91a20683f4755" gracePeriod=2
Dec 05 14:19:38 crc kubenswrapper[4784]: I1205 14:19:38.251570 4784 generic.go:334] "Generic (PLEG): container finished" podID="3458e0fc-e2b4-4bce-9926-44e8fa309bc4" containerID="3ec7a1f0b96874990224c8a733e15d3086eb0ce64bad216f04b91a20683f4755" exitCode=0
Dec 05 14:19:38 crc kubenswrapper[4784]: I1205 14:19:38.251807 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jht8c" event={"ID":"3458e0fc-e2b4-4bce-9926-44e8fa309bc4","Type":"ContainerDied","Data":"3ec7a1f0b96874990224c8a733e15d3086eb0ce64bad216f04b91a20683f4755"}
Dec 05 14:19:38 crc kubenswrapper[4784]: I1205 14:19:38.612556 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jht8c"
Dec 05 14:19:38 crc kubenswrapper[4784]: I1205 14:19:38.728594 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3458e0fc-e2b4-4bce-9926-44e8fa309bc4-utilities\") pod \"3458e0fc-e2b4-4bce-9926-44e8fa309bc4\" (UID: \"3458e0fc-e2b4-4bce-9926-44e8fa309bc4\") "
Dec 05 14:19:38 crc kubenswrapper[4784]: I1205 14:19:38.728981 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6lk4s\" (UniqueName: \"kubernetes.io/projected/3458e0fc-e2b4-4bce-9926-44e8fa309bc4-kube-api-access-6lk4s\") pod \"3458e0fc-e2b4-4bce-9926-44e8fa309bc4\" (UID: \"3458e0fc-e2b4-4bce-9926-44e8fa309bc4\") "
Dec 05 14:19:38 crc kubenswrapper[4784]: I1205 14:19:38.729051 4784 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3458e0fc-e2b4-4bce-9926-44e8fa309bc4-catalog-content\") pod \"3458e0fc-e2b4-4bce-9926-44e8fa309bc4\" (UID: \"3458e0fc-e2b4-4bce-9926-44e8fa309bc4\") "
Dec 05 14:19:38 crc kubenswrapper[4784]: I1205 14:19:38.730655 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3458e0fc-e2b4-4bce-9926-44e8fa309bc4-utilities" (OuterVolumeSpecName: "utilities") pod "3458e0fc-e2b4-4bce-9926-44e8fa309bc4" (UID: "3458e0fc-e2b4-4bce-9926-44e8fa309bc4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 14:19:38 crc kubenswrapper[4784]: I1205 14:19:38.734967 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3458e0fc-e2b4-4bce-9926-44e8fa309bc4-kube-api-access-6lk4s" (OuterVolumeSpecName: "kube-api-access-6lk4s") pod "3458e0fc-e2b4-4bce-9926-44e8fa309bc4" (UID: "3458e0fc-e2b4-4bce-9926-44e8fa309bc4"). InnerVolumeSpecName "kube-api-access-6lk4s". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 14:19:38 crc kubenswrapper[4784]: I1205 14:19:38.833646 4784 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6lk4s\" (UniqueName: \"kubernetes.io/projected/3458e0fc-e2b4-4bce-9926-44e8fa309bc4-kube-api-access-6lk4s\") on node \"crc\" DevicePath \"\""
Dec 05 14:19:38 crc kubenswrapper[4784]: I1205 14:19:38.833708 4784 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3458e0fc-e2b4-4bce-9926-44e8fa309bc4-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 14:19:38 crc kubenswrapper[4784]: I1205 14:19:38.863149 4784 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3458e0fc-e2b4-4bce-9926-44e8fa309bc4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3458e0fc-e2b4-4bce-9926-44e8fa309bc4" (UID: "3458e0fc-e2b4-4bce-9926-44e8fa309bc4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 14:19:38 crc kubenswrapper[4784]: I1205 14:19:38.936063 4784 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3458e0fc-e2b4-4bce-9926-44e8fa309bc4-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 14:19:39 crc kubenswrapper[4784]: I1205 14:19:39.324325 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jht8c" event={"ID":"3458e0fc-e2b4-4bce-9926-44e8fa309bc4","Type":"ContainerDied","Data":"a9c99d48d18a2498eb2556bb6df4903a23c1758bd6fba521a42e14e280ccb2e0"}
Dec 05 14:19:39 crc kubenswrapper[4784]: I1205 14:19:39.324383 4784 scope.go:117] "RemoveContainer" containerID="3ec7a1f0b96874990224c8a733e15d3086eb0ce64bad216f04b91a20683f4755"
Dec 05 14:19:39 crc kubenswrapper[4784]: I1205 14:19:39.324543 4784 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jht8c"
Dec 05 14:19:39 crc kubenswrapper[4784]: I1205 14:19:39.361757 4784 scope.go:117] "RemoveContainer" containerID="95404d264b76e71f2003c53077c40fd27b7c5880e592884a4dc32b136d61fa11"
Dec 05 14:19:39 crc kubenswrapper[4784]: I1205 14:19:39.364371 4784 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jht8c"]
Dec 05 14:19:39 crc kubenswrapper[4784]: I1205 14:19:39.375944 4784 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-jht8c"]
Dec 05 14:19:39 crc kubenswrapper[4784]: I1205 14:19:39.393013 4784 scope.go:117] "RemoveContainer" containerID="8c17c72dde4ba065df2405136e989fd39ca6b64861b25a8b5f00e2ddea3968d0"
Dec 05 14:19:41 crc kubenswrapper[4784]: I1205 14:19:41.011752 4784 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3458e0fc-e2b4-4bce-9926-44e8fa309bc4" path="/var/lib/kubelet/pods/3458e0fc-e2b4-4bce-9926-44e8fa309bc4/volumes"
Dec 05 14:19:59 crc kubenswrapper[4784]: I1205 14:19:59.572287 4784 patch_prober.go:28] interesting pod/machine-config-daemon-sx8lm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 14:19:59 crc kubenswrapper[4784]: I1205 14:19:59.572895 4784 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 14:19:59 crc kubenswrapper[4784]: I1205 14:19:59.572935 4784 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm"
Dec 05 14:19:59 crc kubenswrapper[4784]: I1205 14:19:59.573749 4784 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9d7437b11bd20c6f0cf3a6baeacf4a1fca641265e6807e20d74aa61337dd4734"} pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 14:19:59 crc kubenswrapper[4784]: I1205 14:19:59.573803 4784 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" containerID="cri-o://9d7437b11bd20c6f0cf3a6baeacf4a1fca641265e6807e20d74aa61337dd4734" gracePeriod=600
pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" podUID="be412f31-7a36-4811-8914-be8cdc987d08" containerName="machine-config-daemon" containerID="cri-o://9d7437b11bd20c6f0cf3a6baeacf4a1fca641265e6807e20d74aa61337dd4734" gracePeriod=600 Dec 05 14:20:00 crc kubenswrapper[4784]: I1205 14:20:00.532145 4784 generic.go:334] "Generic (PLEG): container finished" podID="be412f31-7a36-4811-8914-be8cdc987d08" containerID="9d7437b11bd20c6f0cf3a6baeacf4a1fca641265e6807e20d74aa61337dd4734" exitCode=0 Dec 05 14:20:00 crc kubenswrapper[4784]: I1205 14:20:00.532242 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerDied","Data":"9d7437b11bd20c6f0cf3a6baeacf4a1fca641265e6807e20d74aa61337dd4734"} Dec 05 14:20:00 crc kubenswrapper[4784]: I1205 14:20:00.532901 4784 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sx8lm" event={"ID":"be412f31-7a36-4811-8914-be8cdc987d08","Type":"ContainerStarted","Data":"95f05f0f6b808b612289c4f3af9e0548785deb978624b8c7377857b62d8277c2"} Dec 05 14:20:00 crc kubenswrapper[4784]: I1205 14:20:00.532930 4784 scope.go:117] "RemoveContainer" containerID="30bb470db62f03f9ad108e27b3986b159c5260ee961be043b95823d3a75eb7f2" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515114565206024452 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015114565207017370 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015114547115016511 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015114547115015461 5ustar corecore